Transformer: add single-asset Ultra HDR image to HLG HDR video transcoding
PiperOrigin-RevId: 619138202
Parent: 0f42dd4752
Commit: 65e94480f4
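A minimal usage sketch of the new API, for context only; it is not part of the change itself. It mirrors the setup in the new TransformerUltraHdrTest below, and the asset URI, duration, frame rate, output path, and the sketch's class and method names are illustrative placeholders.

// Sketch only: assumes the caller supplies a Context and an output path.
import android.content.Context;
import androidx.media3.common.MediaItem;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.Transformer;

public final class UltraHdrExportSketch {
  public static void exportUltraHdrImageToHlgVideo(Context context, String outputPath) {
    // ImageAssetLoader requires an explicit duration and frame rate for still images.
    EditedMediaItem imageItem =
        new EditedMediaItem.Builder(MediaItem.fromUri("asset:///media/jpeg/ultraHDR.jpg"))
            .setDurationUs(1_000_000)
            .setFrameRate(30)
            .build();
    Composition composition =
        new Composition.Builder(new EditedMediaItemSequence(imageItem))
            // New in this commit: apply the image's gainmap and emit BT.2020 HLG video frames.
            // Ignored (SDR output) below API 34 or when the device cannot encode HLG.
            .experimentalSetRetainHdrFromUltraHdrImage(true)
            .build();
    // Must be called from a thread with a Looper; the export completes asynchronously.
    new Transformer.Builder(context).build().start(composition, outputPath);
  }
}

On devices or API levels without Ultra HDR editing support, the same composition falls back to an SDR export, as the new TransformerUltraHdrTest cases verify.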
@@ -161,6 +161,7 @@ public final class MimeTypes {
// image/ MIME types

public static final String IMAGE_JPEG = BASE_TYPE_IMAGE + "/jpeg";
@UnstableApi public static final String IMAGE_JPEG_R = BASE_TYPE_IMAGE + "/jpeg_r";
@UnstableApi public static final String IMAGE_PNG = BASE_TYPE_IMAGE + "/png";
@UnstableApi public static final String IMAGE_HEIF = BASE_TYPE_IMAGE + "/heif";
@UnstableApi public static final String IMAGE_BMP = BASE_TYPE_IMAGE + "/bmp";
Binary file not shown (new file, 8.1 MiB).
@@ -226,7 +226,8 @@ public class BitmapPixelTestUtil {
/**
* Returns a grayscale bitmap from the Luma channel in the {@link ImageFormat#YUV_420_888} image.
*/
public static Bitmap createGrayscaleArgb8888BitmapFromYuv420888Image(Image image) {
public static Bitmap createGrayscaleBitmapFromYuv420888Image(
Image image, Bitmap.Config bitmapConfig) {
int width = image.getWidth();
int height = image.getHeight();
assertThat(image.getPlanes()).hasLength(3);
@@ -247,7 +248,7 @@ public class BitmapPixelTestUtil {
/* blue= */ lumaValue);
}
}
return Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888);
return Bitmap.createBitmap(colors, width, height, bitmapConfig);
}

/**
@@ -26,6 +26,7 @@ import static org.junit.Assume.assumeFalse;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.media.Image;
import android.media.MediaFormat;
import android.opengl.EGLContext;
@@ -69,6 +70,7 @@ public final class AndroidTestUtil {
public static final String PNG_ASSET_URI_STRING = "asset:///media/png/media3test.png";
public static final String JPG_ASSET_URI_STRING = "asset:///media/jpeg/london.jpg";
public static final String JPG_PORTRAIT_ASSET_URI_STRING = "asset:///media/jpeg/tokyo.jpg";
public static final String ULTRA_HDR_URI_STRING = "asset:///media/jpeg/ultraHDR.jpg";

public static final String MP4_TRIM_OPTIMIZATION_URI_STRING =
"asset:///media/mp4/internal_emulator_transformer_output.mp4";
@@ -633,6 +635,12 @@ public final class AndroidTestUtil {

public static ImmutableList<Bitmap> extractBitmapsFromVideo(Context context, String filePath)
throws IOException, InterruptedException {
return extractBitmapsFromVideo(context, filePath, Config.ARGB_8888);
}

public static ImmutableList<Bitmap> extractBitmapsFromVideo(
Context context, String filePath, Bitmap.Config config)
throws IOException, InterruptedException {
// b/298599172 - runUntilComparisonFrameOrEnded fails on this device because reading decoder
// output as a bitmap doesn't work.
assumeFalse(Util.SDK_INT == 21 && Ascii.toLowerCase(Util.MODEL).contains("nexus"));
@@ -645,7 +653,7 @@ public final class AndroidTestUtil {
if (image == null) {
break;
}
bitmaps.add(BitmapPixelTestUtil.createGrayscaleArgb8888BitmapFromYuv420888Image(image));
bitmaps.add(BitmapPixelTestUtil.createGrayscaleBitmapFromYuv420888Image(image, config));
image.close();
}
}
@@ -34,6 +34,7 @@ import java.util.List;
/** Utility class for testing {@link EditedMediaItemSequence} instances. */
public final class SequenceEffectTestUtil {
public static final ImmutableList<Effect> NO_EFFECT = ImmutableList.of();
private static final String PNG_ASSET_BASE_PATH =
"test-generated-goldens/transformer_sequence_effect_test";
public static final long SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS = 50;
@@ -29,6 +29,7 @@ import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_PORTRAIT_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static androidx.media3.transformer.SequenceEffectTestUtil.NO_EFFECT;
import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
@@ -64,7 +65,6 @@ import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public final class TransformerSequenceEffectTest {

private static final ImmutableList<Effect> NO_EFFECT = ImmutableList.of();
private static final String OVERLAY_PNG_ASSET_PATH = "media/png/media3test.png";
private static final int EXPORT_WIDTH = 360;
private static final int EXPORT_HEIGHT = 240;
@@ -0,0 +1,198 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/

package androidx.media3.transformer;

import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
import static androidx.media3.transformer.AndroidTestUtil.JPG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.ULTRA_HDR_URI_STRING;
import static androidx.media3.transformer.Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
import static androidx.media3.transformer.SequenceEffectTestUtil.NO_EFFECT;
import static androidx.media3.transformer.SequenceEffectTestUtil.oneFrameFromImage;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeTrue;

import android.content.Context;
import android.graphics.Bitmap;
import android.net.Uri;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.util.BitmapLoader;
import androidx.media3.common.util.Util;
import androidx.media3.datasource.DataSourceBitmapLoader;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import java.util.concurrent.ExecutionException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;

/**
* Tests for Ultra HDR support in Transformer that can run on an emulator.
*
* <p>See {@code TransformerMhUltraHdrPixelTest} for other UltraHdr tests.
*/
@RunWith(AndroidJUnit4.class)
public final class TransformerUltraHdrTest {

@Rule public final TestName testName = new TestName();
private final Context context = ApplicationProvider.getApplicationContext();

private String testId;

@Before
public void setUpTestId() {
testId = testName.getMethodName();
}

@Test
public void exportUltraHdrImage_withUltraHdrEnabledOnUnsupportedApiLevel_fallbackToExportSdr()
throws Exception {
assumeTrue(Util.SDK_INT < 34);
Composition composition =
createUltraHdrComposition(
/* tonemap= */ false, oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT));

// Downscale source bitmap to avoid "video encoding format not supported" errors on emulators.
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, createDownscalingTransformer())
.build()
.run(testId, composition);

assertThat(result.filePath).isNotNull();
ColorInfo colorInfo =
retrieveTrackFormat(context, result.filePath, C.TRACK_TYPE_VIDEO).colorInfo;
assertThat(colorInfo.colorSpace).isEqualTo(C.COLOR_SPACE_BT709);
assertThat(colorInfo.colorTransfer).isEqualTo(C.COLOR_TRANSFER_SDR);
}

@Test
public void exportUltraHdrImage_withUltraHdrAndTonemappingEnabled_exportsSdr() throws Exception {
Composition composition =
createUltraHdrComposition(
/* tonemap= */ true, oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT));

// Downscale source bitmap to avoid "video encoding format not supported" errors on emulators.
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, createDownscalingTransformer())
.build()
.run(testId, composition);

assertThat(result.filePath).isNotNull();
ColorInfo colorInfo =
retrieveTrackFormat(context, result.filePath, C.TRACK_TYPE_VIDEO).colorInfo;
assertThat(colorInfo.colorSpace).isEqualTo(C.COLOR_SPACE_BT709);
assertThat(colorInfo.colorTransfer).isEqualTo(C.COLOR_TRANSFER_SDR);
}

@Test
public void exportUltraHdrImage_withUltraHdrDisabled_exportsSdr() throws Exception {
Composition composition =
new Composition.Builder(
new EditedMediaItemSequence(oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT)))
.build();

// Downscale source bitmap to avoid "video encoding format not supported" errors on emulators.
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, createDownscalingTransformer())
.build()
.run(testId, composition);

assertThat(result.filePath).isNotNull();
ColorInfo colorInfo =
retrieveTrackFormat(context, result.filePath, C.TRACK_TYPE_VIDEO).colorInfo;
assertThat(colorInfo.colorSpace).isEqualTo(C.COLOR_SPACE_BT709);
assertThat(colorInfo.colorTransfer).isEqualTo(C.COLOR_TRANSFER_SDR);
}

@Test
public void exportNonUltraHdrImage_withUltraHdrEnabled_exportsSdr() throws Exception {
Composition composition =
createUltraHdrComposition(
/* tonemap= */ false, oneFrameFromImage(JPG_ASSET_URI_STRING, NO_EFFECT));

ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
.build()
.run(testId, composition);

assertThat(result.filePath).isNotNull();
ColorInfo colorInfo =
retrieveTrackFormat(context, result.filePath, C.TRACK_TYPE_VIDEO).colorInfo;
assertThat(colorInfo.colorSpace).isEqualTo(C.COLOR_SPACE_BT709);
assertThat(colorInfo.colorTransfer).isEqualTo(C.COLOR_TRANSFER_SDR);
}

private static Composition createUltraHdrComposition(
boolean tonemap, EditedMediaItem editedMediaItem, EditedMediaItem... editedMediaItems) {
Composition.Builder builder =
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem, editedMediaItems))
.experimentalSetRetainHdrFromUltraHdrImage(true);
if (tonemap) {
builder.setHdrMode(HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL);
}
return builder.build();
}

private Transformer createDownscalingTransformer() {
BitmapLoader downscalingBitmapLoader =
new BitmapLoader() {

static final int DOWNSCALED_WIDTH_HEIGHT = 120;
final BitmapLoader bitmapLoader;

{
bitmapLoader = new DataSourceBitmapLoader(context);
}

@Override
public boolean supportsMimeType(String mimeType) {
return bitmapLoader.supportsMimeType(mimeType);
}

@Override
public ListenableFuture<Bitmap> decodeBitmap(byte[] data) {
return bitmapLoader.decodeBitmap(data);
}

@Override
public ListenableFuture<Bitmap> loadBitmap(Uri uri) {
SettableFuture<Bitmap> outputFuture = SettableFuture.create();
try {
Bitmap bitmap =
Bitmap.createScaledBitmap(
bitmapLoader.loadBitmap(uri).get(),
DOWNSCALED_WIDTH_HEIGHT,
DOWNSCALED_WIDTH_HEIGHT,
/* filter= */ true);
outputFuture.set(bitmap);
return outputFuture;
} catch (ExecutionException | InterruptedException e) {
throw new RuntimeException(e);
}
}
};

return new Transformer.Builder(context)
.setAssetLoaderFactory(new DefaultAssetLoaderFactory(context, downscalingBitmapLoader))
.build();
}
}
@@ -0,0 +1,141 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/

package androidx.media3.transformer.mh;

import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA;
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT;
import static androidx.media3.transformer.AndroidTestUtil.ULTRA_HDR_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.extractBitmapsFromVideo;
import static androidx.media3.transformer.AndroidTestUtil.recordTestSkipped;
import static androidx.media3.transformer.SequenceEffectTestUtil.NO_EFFECT;
import static androidx.media3.transformer.SequenceEffectTestUtil.oneFrameFromImage;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import androidx.annotation.RequiresApi;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.mediacodec.MediaCodecUtil.DecoderQueryException;
import androidx.media3.transformer.AndroidTestUtil;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.ExportTestResult;
import androidx.media3.transformer.Transformer;
import androidx.media3.transformer.TransformerAndroidTestRunner;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.io.IOException;
import java.util.List;
import org.json.JSONException;
import org.junit.AssumptionViolatedException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;

/** Tests for Ultra HDR support in Transformer that should only run in mobile harness. */
@RunWith(AndroidJUnit4.class)
public final class TransformerMhUltraHdrPixelTest {

private static final String PNG_ASSET_BASE_PATH =
"test-generated-goldens/TransformerUltraHdrPixelTest";

@Rule public final TestName testName = new TestName();
private final Context context = ApplicationProvider.getApplicationContext();

private String testId;

@Before
public void setUpTestId() {
testId = testName.getMethodName();
}

@Test
public void exportUltraHdrImage_withUltraHdrEnabledOnSupportedDevice_succeeds() throws Exception {
assumeDeviceSupportsUltraHdrEditing();
Composition composition =
createUltraHdrComposition(oneFrameFromImage(ULTRA_HDR_URI_STRING, NO_EFFECT));

ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
.build()
.run(testId, composition);

assertThat(result.filePath).isNotNull();
ColorInfo colorInfo =
retrieveTrackFormat(context, result.filePath, C.TRACK_TYPE_VIDEO).colorInfo;
assertThat(colorInfo.colorSpace).isEqualTo(C.COLOR_SPACE_BT2020);
assertThat(colorInfo.colorTransfer).isEqualTo(C.COLOR_TRANSFER_HLG);
assertFp16BitmapsMatchExpectedAndSave(
extractBitmapsFromVideo(context, result.filePath, Config.RGBA_F16), testId);
}

@RequiresApi(29) // getBitmapAveragePixelAbsoluteDifferenceFp16()
public static void assertFp16BitmapsMatchExpectedAndSave(
List<Bitmap> actualBitmaps, String testId) throws IOException {
for (int i = 0; i < actualBitmaps.size(); i++) {
maybeSaveTestBitmap(
testId, /* bitmapLabel= */ String.valueOf(i), actualBitmaps.get(i), /* path= */ null);
}

for (int i = 0; i < actualBitmaps.size(); i++) {
String subTestId = testId + "_" + i;
String expectedPath = Util.formatInvariant("%s/%s.png", PNG_ASSET_BASE_PATH, subTestId);
Bitmap expectedBitmap = readBitmap(expectedPath);

float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceFp16(expectedBitmap, actualBitmaps.get(i));
assertWithMessage("For expected bitmap " + expectedPath)
.that(averagePixelAbsoluteDifference)
.isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_LUMA);
}
}

private void assumeDeviceSupportsUltraHdrEditing()
throws JSONException, IOException, DecoderQueryException {
if (Util.SDK_INT < 34) {
recordTestSkipped(
getApplicationContext(), testId, "Ultra HDR is not supported on this API level.");
throw new AssumptionViolatedException("Ultra HDR is not supported on this API level.");
}
AndroidTestUtil.assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT,
/* outputFormat= */ null);
}

private static Composition createUltraHdrComposition(
EditedMediaItem editedMediaItem, EditedMediaItem... editedMediaItems) {
Composition.Builder builder =
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem, editedMediaItems))
.experimentalSetRetainHdrFromUltraHdrImage(true);
return builder.build();
}
}
@@ -24,6 +24,7 @@ import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.Format;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.transformer.Composition.HdrMode;
import com.google.common.collect.ImmutableMap;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
@@ -144,14 +145,12 @@ public interface AssetLoader {
*/
/* package */ class CompositionSettings {

public final @Composition.HdrMode int hdrMode;
public final @HdrMode int hdrMode;
public final boolean retainHdrFromUltraHdrImage;

public CompositionSettings() {
this.hdrMode = Composition.HDR_MODE_KEEP_HDR;
}

public CompositionSettings(@Composition.HdrMode int hdrMode) {
public CompositionSettings(@HdrMode int hdrMode, boolean retainHdrFromUltraHdrImage) {
this.hdrMode = hdrMode;
this.retainHdrFromUltraHdrImage = retainHdrFromUltraHdrImage;
}
}

@@ -50,6 +50,7 @@ public final class Composition {
private boolean transmuxAudio;
private boolean transmuxVideo;
private @HdrMode int hdrMode;
private boolean retainHdrFromUltraHdrImage;

/**
* Creates an instance.
@@ -88,6 +89,7 @@ public final class Composition {
transmuxAudio = composition.transmuxAudio;
transmuxVideo = composition.transmuxVideo;
hdrMode = composition.hdrMode;
retainHdrFromUltraHdrImage = composition.retainHdrFromUltraHdrImage;
}

/**
@@ -230,6 +232,33 @@ public final class Composition {
return this;
}
/**
* Sets whether to produce an HDR output video from Ultra HDR image input.
*
* <p>If the {@link HdrMode} is {@link #HDR_MODE_KEEP_HDR}, then setting this to {@code true}
* applies the recovery map (i.e. the gainmap) to the base image to produce HDR video frames.
*
* <p>The output video will have the same color encoding as the first {@link EditedMediaItem}
* in the sequence. If the Ultra HDR image is first in the sequence, the output video will
* default to BT2020 HLG full range colors.
*
* <p>Ignored if {@link HdrMode} is not {@link #HDR_MODE_KEEP_HDR}.
*
* <p>Supported on API 34+, by some device and HDR format combinations. Ignored if unsupported
* by device or API level.
*
* <p>The default value is {@code false}.
*
* @param retainHdrFromUltraHdrImage Whether to produce an HDR output video from Ultra HDR
* image input.
* @return This builder.
*/
@CanIgnoreReturnValue
public Builder experimentalSetRetainHdrFromUltraHdrImage(boolean retainHdrFromUltraHdrImage) {
this.retainHdrFromUltraHdrImage = retainHdrFromUltraHdrImage;
return this;
}

/** Builds a {@link Composition} instance. */
public Composition build() {
return new Composition(
@@ -239,7 +268,8 @@ public final class Composition {
forceAudioTrack,
transmuxAudio,
transmuxVideo,
hdrMode);
hdrMode,
retainHdrFromUltraHdrImage && hdrMode == HDR_MODE_KEEP_HDR);
}

/**
@@ -377,6 +407,14 @@ public final class Composition {
*/
public final @HdrMode int hdrMode;
/**
* Whether to produce an HDR output video from Ultra HDR image input.
*
* <p>For more information, see {@link
* Builder#experimentalSetRetainHdrFromUltraHdrImage(boolean)}.
*/
public final boolean retainHdrFromUltraHdrImage;

/** Returns a {@link Composition.Builder} initialized with the values of this instance. */
/* package */ Builder buildUpon() {
return new Builder(this);
@@ -389,7 +427,8 @@ public final class Composition {
boolean forceAudioTrack,
boolean transmuxAudio,
boolean transmuxVideo,
@HdrMode int hdrMode) {
@HdrMode int hdrMode,
boolean retainHdrFromUltraHdrImage) {
checkArgument(
!transmuxAudio || !forceAudioTrack,
"Audio transmuxing and audio track forcing are not allowed together.");
@@ -400,5 +439,6 @@ public final class Composition {
this.transmuxVideo = transmuxVideo;
this.forceAudioTrack = forceAudioTrack;
this.hdrMode = hdrMode;
this.retainHdrFromUltraHdrImage = retainHdrFromUltraHdrImage;
}
}
@@ -92,13 +92,9 @@ public final class DefaultAssetLoaderFactory implements AssetLoader.Factory {
* The frame loaded is determined by the {@link BitmapLoader} implementation.
*
* @param context The {@link Context}.
* @param hdrMode The {@link Composition.HdrMode} to apply. Only {@link
* Composition#HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC} and {@link
* Composition#HDR_MODE_EXPERIMENTAL_FORCE_INTERPRET_HDR_AS_SDR} are applied.
* @param bitmapLoader The {@link BitmapLoader} to use to load and decode images.
*/
public DefaultAssetLoaderFactory(
Context context, @Composition.HdrMode int hdrMode, BitmapLoader bitmapLoader) {
public DefaultAssetLoaderFactory(Context context, BitmapLoader bitmapLoader) {
this.context = context.getApplicationContext();
this.decoderFactory = new DefaultDecoderFactory(context);
this.clock = Clock.DEFAULT;
@@ -38,6 +38,7 @@ import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.BitmapLoader;
import androidx.media3.common.util.ConstantRateTimestampIterator;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.transformer.SampleConsumer.InputResult;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.FutureCallback;
@@ -56,6 +57,8 @@ import java.util.concurrent.ScheduledExecutorService;
@UnstableApi
public final class ImageAssetLoader implements AssetLoader {

private final boolean retainHdrFromUltraHdrImage;

/** An {@link AssetLoader.Factory} for {@link ImageAssetLoader} instances. */
public static final class Factory implements AssetLoader.Factory {

@@ -76,7 +79,8 @@ public final class ImageAssetLoader implements AssetLoader {
Looper looper,
Listener listener,
CompositionSettings compositionSettings) {
return new ImageAssetLoader(editedMediaItem, listener, bitmapLoader);
return new ImageAssetLoader(
editedMediaItem, listener, bitmapLoader, compositionSettings.retainHdrFromUltraHdrImage);
}
}

@@ -95,7 +99,11 @@ public final class ImageAssetLoader implements AssetLoader {
private volatile int progress;

private ImageAssetLoader(
EditedMediaItem editedMediaItem, Listener listener, BitmapLoader bitmapLoader) {
EditedMediaItem editedMediaItem,
Listener listener,
BitmapLoader bitmapLoader,
boolean retainHdrFromUltraHdrImage) {
this.retainHdrFromUltraHdrImage = retainHdrFromUltraHdrImage;
checkState(editedMediaItem.durationUs != C.TIME_UNSET);
checkState(editedMediaItem.frameRate != C.RATE_UNSET_INT);
this.editedMediaItem = editedMediaItem;
@@ -124,16 +132,20 @@ public final class ImageAssetLoader implements AssetLoader {
@Override
public void onSuccess(Bitmap bitmap) {
progress = 50;
try {
Format format =
Format inputFormat =
new Format.Builder()
.setHeight(bitmap.getHeight())
.setWidth(bitmap.getWidth())
.setSampleMimeType(MIME_TYPE_IMAGE_ALL)
.setColorInfo(ColorInfo.SRGB_BT709_FULL)
.build();
listener.onTrackAdded(format, SUPPORTED_OUTPUT_TYPE_DECODED);
scheduledExecutorService.submit(() -> queueBitmapInternal(bitmap, format));
Format outputFormat =
retainHdrFromUltraHdrImage && Util.SDK_INT >= 34 && bitmap.hasGainmap()
? inputFormat.buildUpon().setSampleMimeType(MimeTypes.IMAGE_JPEG_R).build()
: inputFormat;
try {
listener.onTrackAdded(inputFormat, SUPPORTED_OUTPUT_TYPE_DECODED);
scheduledExecutorService.submit(() -> queueBitmapInternal(bitmap, outputFormat));
} catch (RuntimeException e) {
listener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED));
}
@@ -238,7 +238,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
sequence,
composition.forceAudioTrack,
assetLoaderFactory,
new CompositionSettings(transformationRequest.hdrMode),
new CompositionSettings(
transformationRequest.hdrMode, composition.retainHdrFromUltraHdrImage),
sequenceAssetLoaderListener,
clock,
internalLooper));
@@ -689,17 +690,23 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
muxerWrapper,
fallbackListener));
} else {
ColorInfo decoderOutputColor;
Format firstFormat;
if (MimeTypes.isVideo(assetLoaderOutputFormat.sampleMimeType)) {
// TODO(b/267301878): Pass firstAssetLoaderOutputFormat once surface creation not in VSP.
boolean isMediaCodecToneMappingRequested =
transformationRequest.hdrMode == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC;
decoderOutputColor =
ColorInfo decoderOutputColor =
getDecoderOutputColor(
getValidColor(firstAssetLoaderInputFormat.colorInfo),
isMediaCodecToneMappingRequested);
firstFormat =
firstAssetLoaderInputFormat.buildUpon().setColorInfo(decoderOutputColor).build();
} else if (MimeTypes.isImage(assetLoaderOutputFormat.sampleMimeType)) {
decoderOutputColor = getValidColor(assetLoaderOutputFormat.colorInfo);
firstFormat =
firstAssetLoaderInputFormat
.buildUpon()
.setColorInfo(getValidColor(assetLoaderOutputFormat.colorInfo))
.build();
} else {
throw ExportException.createForUnexpected(
new IllegalArgumentException(
@@ -710,7 +717,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
C.TRACK_TYPE_VIDEO,
new VideoSampleExporter(
context,
firstAssetLoaderInputFormat.buildUpon().setColorInfo(decoderOutputColor).build(),
firstFormat,
transformationRequest,
composition.videoCompositorSettings,
composition.effects.videoEffects,
@@ -16,6 +16,9 @@

package androidx.media3.transformer;

import static androidx.media3.common.C.COLOR_RANGE_FULL;
import static androidx.media3.common.C.COLOR_SPACE_BT2020;
import static androidx.media3.common.C.COLOR_TRANSFER_HLG;
import static androidx.media3.common.ColorInfo.SDR_BT709_LIMITED;
import static androidx.media3.common.ColorInfo.SRGB_BT709_FULL;
import static androidx.media3.common.ColorInfo.isTransferHdr;
@@ -52,6 +55,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Objects;
import org.checkerframework.checker.initialization.qual.Initialized;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.dataflow.qual.Pure;
@@ -94,26 +98,40 @@ import org.checkerframework.dataflow.qual.Pure;
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
finalFramePresentationTimeUs = C.TIME_UNSET;

ColorInfo videoGraphInputColor = checkNotNull(firstInputFormat.colorInfo);
ColorInfo videoGraphOutputColor;
if (videoGraphInputColor.colorTransfer == C.COLOR_TRANSFER_SRGB) {
// The sRGB color transfer is only used for images.
// When an Ultra HDR image is transcoded into a video, we use BT2020 HLG full range colors in
// the resulting HDR video.
// When an SDR image gets transcoded into a video, we use the SMPTE 170M transfer function for
// the resulting video.
videoGraphOutputColor =
Objects.equals(firstInputFormat.sampleMimeType, MimeTypes.IMAGE_JPEG_R)
? new ColorInfo.Builder()
.setColorSpace(COLOR_SPACE_BT2020)
.setColorTransfer(COLOR_TRANSFER_HLG)
.setColorRange(COLOR_RANGE_FULL)
.build()
: SDR_BT709_LIMITED;
} else {
videoGraphOutputColor = videoGraphInputColor;
}

encoderWrapper =
new EncoderWrapper(
encoderFactory,
firstInputFormat,
firstInputFormat.buildUpon().setColorInfo(videoGraphOutputColor).build(),
muxerWrapper.getSupportedSampleMimeTypes(C.TRACK_TYPE_VIDEO),
transformationRequest,
fallbackListener);
encoderOutputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);

ColorInfo videoGraphInputColor = checkNotNull(firstInputFormat.colorInfo);
boolean isGlToneMapping =
encoderWrapper.getHdrModeAfterFallback() == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL
&& ColorInfo.isTransferHdr(videoGraphInputColor);
ColorInfo videoGraphOutputColor;
if (videoGraphInputColor.colorTransfer == C.COLOR_TRANSFER_SRGB) {
// The sRGB color transfer is only used for images, so when an image gets transcoded into a
// video, we use the SMPTE 170M transfer function for the resulting video.
videoGraphOutputColor = SDR_BT709_LIMITED;
} else if (isGlToneMapping) {
if (isGlToneMapping) {
// For consistency with the Android platform, OpenGL tone mapping outputs colors with
// C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
// C.COLOR_TRANSFER_SDR to the encoder.
@@ -123,8 +141,6 @@ import org.checkerframework.dataflow.qual.Pure;
.setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build();
} else {
videoGraphOutputColor = videoGraphInputColor;
}

try {
@@ -145,7 +145,12 @@ public class ExoPlayerAssetLoaderTest {
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri("asset:///media/mp4/sample.mp4")).build();
return new ExoPlayerAssetLoader.Factory(context, decoderFactory, clock)
.createAssetLoader(editedMediaItem, Looper.myLooper(), listener, new CompositionSettings());
.createAssetLoader(
editedMediaItem,
Looper.myLooper(),
listener,
new CompositionSettings(
Composition.HDR_MODE_KEEP_HDR, /* retainHdrFromUltraHdrImage= */ false));
}

private static final class FakeSampleConsumer implements SampleConsumer {
@@ -123,7 +123,12 @@ public class ImageAssetLoaderTest {
.build();
return new ImageAssetLoader.Factory(
new DataSourceBitmapLoader(ApplicationProvider.getApplicationContext()))
.createAssetLoader(editedMediaItem, Looper.myLooper(), listener, new CompositionSettings());
.createAssetLoader(
editedMediaItem,
Looper.myLooper(),
listener,
new CompositionSettings(
Composition.HDR_MODE_KEEP_HDR, /* retainHdrFromUltraHdrImage= */ false));
}

private static final class FakeSampleConsumer implements SampleConsumer {