Merge pull request #1235 from DolbyLaboratories:dlb/dovi-transformer/dev

PiperOrigin-RevId: 718856455
Copybara-Service 2025-01-23 07:26:21 -08:00
commit c1242ffef1
8 changed files with 281 additions and 38 deletions

View File

@@ -41,6 +41,8 @@
while `InAppFragmentedMp4Muxer` is to be used for producing a fragmented
MP4 file.
* Move `Muxer` interface from `media3-muxer` to `media3-transformer`.
* Add support for transcoding and transmuxing Dolby Vision (profile 8)
format.
* Track Selection:
* Extractors:
* Fix handling of NAL units with lengths expressed in 1 or 2 bytes (rather

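The release-notes entry above is the user-facing summary of this change; the behavior is exercised by the Transformer tests later in this diff. A minimal usage sketch, not part of this commit, of how an app could request Dolby Vision (profile 8) output; the class name, input URI, and output path are placeholders, and an API 33+ device with a Dolby Vision encoder is assumed:

import android.content.Context;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.transformer.Transformer;

public final class DolbyVisionExportSketch {
  // Sketch: mirrors the setVideoMimeType(MimeTypes.VIDEO_DOLBY_VISION) usage in
  // HdrEditingTest below. Transformer.start must be called from a thread with a Looper.
  public static void exportToDolbyVision(Context context, String inputUri, String outputPath) {
    Transformer transformer =
        new Transformer.Builder(context)
            .setVideoMimeType(MimeTypes.VIDEO_DOLBY_VISION) // Request Dolby Vision video output.
            .build();
    transformer.start(MediaItem.fromUri(inputUri), outputPath);
  }
}
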
View File

@@ -266,7 +266,8 @@ public final class ConfigurationActivity extends AppCompatActivity {
MimeTypes.VIDEO_H264,
MimeTypes.VIDEO_H265,
MimeTypes.VIDEO_MP4V,
MimeTypes.VIDEO_AV1);
MimeTypes.VIDEO_AV1,
MimeTypes.VIDEO_DOLBY_VISION);
ArrayAdapter<String> resolutionHeightAdapter =
new ArrayAdapter<>(/* context= */ this, R.layout.spinner_item);

View File

@@ -1776,33 +1776,46 @@ public class TransformerEndToEndTest {
}
@Test
public void dolbyVisionVideo_noEffects_transmuxesToHevc() throws Exception {
assumeTrue("This test requires B-frame support", Util.SDK_INT > 24);
assumeTrue(
new DefaultMuxer.Factory()
.getSupportedSampleMimeTypes(C.TRACK_TYPE_VIDEO)
.contains(MimeTypes.VIDEO_H265));
public void transmuxDolbyVisionVideo_whenMuxerDoesNotSupportDolbyVision_transmuxesToHevc()
throws Exception {
// Hevc support is available from API 24.
// The asset has B-frames and B-frame support is available from API 25.
// Dolby vision support is available from API 33.
assumeTrue(Util.SDK_INT >= 25 && Util.SDK_INT < 33);
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.parse(MP4_ASSET_DOLBY_VISION_HDR.uri)))
.setRemoveAudio(true)
.build();
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(
context,
new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H265).build())
new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
.build()
.run(testId, editedMediaItem);
MediaExtractorCompat mediaExtractor = new MediaExtractorCompat(context);
mediaExtractor.setDataSource(Uri.parse(result.filePath), /* offset= */ 0);
checkState(mediaExtractor.getTrackCount() == 1);
MediaFormat mediaFormat = mediaExtractor.getTrackFormat(/* trackIndex= */ 0);
Format format = createFormatFromMediaFormat(mediaFormat);
Format format = retrieveTrackFormat(context, result.filePath, C.TRACK_TYPE_VIDEO);
assertThat(format.sampleMimeType).isEqualTo(MimeTypes.VIDEO_H265);
assertThat(result.exportResult.videoConversionProcess).isEqualTo(CONVERSION_PROCESS_TRANSMUXED);
}
@Test
public void transmuxDolbyVisionVideo_transmuxesSuccessfully() throws Exception {
assumeTrue("Dolby vision support available from API 33", Util.SDK_INT >= 33);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_DOLBY_VISION_HDR.uri));
ExportTestResult exportTestResult =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, mediaItem);
Format trackFormat =
retrieveTrackFormat(context, exportTestResult.filePath, C.TRACK_TYPE_VIDEO);
assertThat(trackFormat.sampleMimeType).isEqualTo(MimeTypes.VIDEO_DOLBY_VISION);
assertThat(trackFormat.codecs).isEqualTo("dvhe.08.02");
assertThat(exportTestResult.exportResult.videoConversionProcess)
.isEqualTo(CONVERSION_PROCESS_TRANSMUXED);
}
@Test
public void dolbyVisionVideo_noEffects_withInAppMuxer_transmuxesToHevc() throws Exception {
EditedMediaItem editedMediaItem =

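The tests above pin down the intended behavior: below API 33 the Dolby Vision track is transmuxed to plain HEVC, while on API 33 and above the video/dolby-vision sample MIME type and the "dvhe.08.02" codecs string are preserved. As a rough illustration, not part of this commit, the exported file can be inspected with the framework MediaExtractor; the class and method names below are placeholders, similar in spirit to the retrieveTrackFormat helper the tests use:

import android.media.MediaExtractor;
import android.media.MediaFormat;
import java.io.IOException;

public final class OutputInspectionSketch {
  // Sketch: returns the sample MIME type of the first track of an exported file,
  // e.g. "video/hevc" after transmuxing to HEVC, or "video/dolby-vision" on API 33+.
  public static String firstTrackMimeType(String filePath) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    try {
      extractor.setDataSource(filePath);
      return extractor.getTrackFormat(/* index= */ 0).getString(MediaFormat.KEY_MIME);
    } finally {
      extractor.release();
    }
  }
}
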
View File

@@ -66,7 +66,11 @@ public final class HdrCapabilitiesUtil {
checkState(ColorInfo.isTransferHdr(format.colorInfo));
if (EncoderUtil.getSupportedEncodersForHdrEditing(format.sampleMimeType, format.colorInfo)
.isEmpty()) {
String skipReason = "No HDR editing support for " + format.colorInfo;
String skipReason =
"No HDR editing supported for sample mime type "
+ format.sampleMimeType
+ " and color info "
+ format.colorInfo;
recordTestSkipped(getApplicationContext(), testId, skipReason);
throw new AssumptionViolatedException(skipReason);
}
@@ -82,7 +86,11 @@ public final class HdrCapabilitiesUtil {
checkState(ColorInfo.isTransferHdr(format.colorInfo));
if (!EncoderUtil.getSupportedEncodersForHdrEditing(format.sampleMimeType, format.colorInfo)
.isEmpty()) {
String skipReason = "HDR editing support for " + format.colorInfo;
String skipReason =
"HDR editing supported for sample mime type "
+ format.sampleMimeType
+ " and color info "
+ format.colorInfo;
recordTestSkipped(getApplicationContext(), testId, skipReason);
throw new AssumptionViolatedException(skipReason);
}

View File

@ -50,6 +50,7 @@ import androidx.media3.transformer.Composition;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EncoderUtil;
import androidx.media3.transformer.ExportException;
import androidx.media3.transformer.ExportResult;
import androidx.media3.transformer.ExportTestResult;
import androidx.media3.transformer.TransformationRequest;
import androidx.media3.transformer.Transformer;
@@ -207,15 +208,72 @@ public final class HdrEditingTest {
}
@Test
public void exportAndTranscode_dolbyVisionFile_whenHdrEditingIsSupported() throws Exception {
public void exportAndTranscode_hlg10VideoToDolbyVision_whenDolbyVisionSupported()
throws Exception {
Context context = ApplicationProvider.getApplicationContext();
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10.videoFormat,
/* outputFormat= */ MP4_ASSET_1080P_5_SECOND_HLG10
.videoFormat
.buildUpon()
.setSampleMimeType(MimeTypes.VIDEO_DOLBY_VISION)
.setCodecs(null)
.build());
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_1080P_5_SECOND_HLG10.uri));
EditedMediaItem editedMediaItem = new EditedMediaItem.Builder(mediaItem).build();
Transformer transformer =
new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_DOLBY_VISION).build();
ExportTestResult exportTestResult =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, editedMediaItem);
ExportResult exportResult = exportTestResult.exportResult;
assertThat(exportResult.videoMimeType).isEqualTo(MimeTypes.VIDEO_DOLBY_VISION);
Format videoTrackFormat =
retrieveTrackFormat(context, exportTestResult.filePath, C.TRACK_TYPE_VIDEO);
assertThat(videoTrackFormat.sampleMimeType).isEqualTo(MimeTypes.VIDEO_DOLBY_VISION);
int actualColorTransfer = videoTrackFormat.colorInfo.colorTransfer;
assertThat(actualColorTransfer).isEqualTo(C.COLOR_TRANSFER_HLG);
}
@Test
public void exportAndTranscode_dolbyVisionFileToDolbyVision_whenDolbyVisionSupported()
throws Exception {
Context context = ApplicationProvider.getApplicationContext();
Format format = MP4_ASSET_DOLBY_VISION_HDR.videoFormat;
// Check HDR support for both VIDEO_DOLBY_VISION and VIDEO_H265 mime types.
if (EncoderUtil.getSupportedEncodersForHdrEditing(format.sampleMimeType, format.colorInfo)
.isEmpty()) {
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ format);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_DOLBY_VISION_HDR.uri));
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();
ExportTestResult exportTestResult =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, editedMediaItem);
ExportResult exportResult = exportTestResult.exportResult;
assertThat(exportResult.videoMimeType).isEqualTo(MimeTypes.VIDEO_DOLBY_VISION);
Format videoTrackFormat =
retrieveTrackFormat(context, exportTestResult.filePath, C.TRACK_TYPE_VIDEO);
assertThat(videoTrackFormat.sampleMimeType).isEqualTo(MimeTypes.VIDEO_DOLBY_VISION);
int actualColorTransfer = videoTrackFormat.colorInfo.colorTransfer;
assertThat(actualColorTransfer).isEqualTo(C.COLOR_TRANSFER_HLG);
}
@Test
public void
exportAndTranscode_dolbyVisionFileToHlg_whenDolbyVisionIsNotSupportedAndHlgIsSupported()
throws Exception {
Context context = ApplicationProvider.getApplicationContext();
Format format = MP4_ASSET_DOLBY_VISION_HDR.videoFormat;
assumeDeviceDoesNotSupportHdrEditing(testId, format);
assumeDeviceSupportsHdrEditing(
testId, format.buildUpon().setSampleMimeType(MimeTypes.VIDEO_H265).build());
}
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
Transformer transformer = new Transformer.Builder(context).build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_DOLBY_VISION_HDR.uri));
@@ -226,11 +284,13 @@ public final class HdrEditingTest {
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, editedMediaItem);
@C.ColorTransfer
int actualColorTransfer =
retrieveTrackFormat(context, exportTestResult.filePath, C.TRACK_TYPE_VIDEO)
.colorInfo
.colorTransfer;
ExportResult exportResult = exportTestResult.exportResult;
assertThat(exportResult.videoMimeType).isEqualTo(MimeTypes.VIDEO_H265);
Format videoTrackFormat =
retrieveTrackFormat(context, exportTestResult.filePath, C.TRACK_TYPE_VIDEO);
assertThat(videoTrackFormat.sampleMimeType).isEqualTo(MimeTypes.VIDEO_H265);
int actualColorTransfer = videoTrackFormat.colorInfo.colorTransfer;
assertThat(actualColorTransfer).isEqualTo(C.COLOR_TRANSFER_HLG);
}
@@ -428,6 +488,66 @@ public final class HdrEditingTest {
}
}
@Test
public void exportAndTranscode_dolbyVisionFile_whenHdrEditingUnsupported_toneMapsOrThrows()
throws Exception {
Context context = ApplicationProvider.getApplicationContext();
Format format = MP4_ASSET_DOLBY_VISION_HDR.videoFormat;
// Check HDR support for both VIDEO_DOLBY_VISION and VIDEO_H265 mime types.
assumeDeviceDoesNotSupportHdrEditing(testId, format);
assumeDeviceDoesNotSupportHdrEditing(
testId, format.buildUpon().setSampleMimeType(MimeTypes.VIDEO_H265).build());
assumeFormatsSupported(context, testId, /* inputFormat= */ format, /* outputFormat= */ null);
AtomicBoolean isFallbackListenerInvoked = new AtomicBoolean();
AtomicBoolean isToneMappingFallbackApplied = new AtomicBoolean();
Transformer transformer =
new Transformer.Builder(context)
.addListener(
new Transformer.Listener() {
@Override
public void onFallbackApplied(
Composition composition,
TransformationRequest originalTransformationRequest,
TransformationRequest fallbackTransformationRequest) {
isFallbackListenerInvoked.set(true);
assertThat(originalTransformationRequest.hdrMode).isEqualTo(HDR_MODE_KEEP_HDR);
isToneMappingFallbackApplied.set(
fallbackTransformationRequest.hdrMode
== HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL);
}
})
.build();
MediaItem mediaItem = MediaItem.fromUri(Uri.parse(MP4_ASSET_DOLBY_VISION_HDR.uri));
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(mediaItem).setEffects(FORCE_TRANSCODE_VIDEO_EFFECTS).build();
try {
ExportTestResult exportTestResult =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, editedMediaItem);
assertThat(isToneMappingFallbackApplied.get()).isTrue();
@C.ColorTransfer
int actualColorTransfer =
retrieveTrackFormat(context, exportTestResult.filePath, C.TRACK_TYPE_VIDEO)
.colorInfo
.colorTransfer;
assertThat(actualColorTransfer).isEqualTo(C.COLOR_TRANSFER_SDR);
} catch (ExportException exception) {
if (exception.getCause() != null) {
@Nullable String message = exception.getCause().getMessage();
if (message != null
&& (Objects.equals(message, "Decoding HDR is not supported on this device.")
|| message.contains(
"OpenGL ES 3.0 context support is required for HDR input or output.")
|| Objects.equals(message, "Device lacks YUV extension support."))) {
return;
}
}
throw exception;
}
}
private static List<Byte> byteList(ByteBuffer buffer) {
ArrayList<Byte> outputBytes = new ArrayList<>();
while (buffer.hasRemaining()) {

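The new fallback test above leans on Transformer's existing behavior: when HDR editing is supported for neither video/dolby-vision nor video/hevc, the request falls back from HDR_MODE_KEEP_HDR to HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL, or export fails with one of the listed decoder/GL capability messages. A trimmed sketch, not part of this commit, of an app-side listener that just observes that fallback; it assumes the HdrMode constants live on Composition, as in recent Media3 releases, and the class name is a placeholder:

import android.content.Context;
import android.util.Log;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.TransformationRequest;
import androidx.media3.transformer.Transformer;

public final class ToneMapFallbackObserverSketch {
  // Sketch: logs when Transformer falls back from keeping HDR to OpenGL tone mapping,
  // mirroring the listener assertions in the test above.
  public static Transformer buildTransformer(Context context) {
    return new Transformer.Builder(context)
        .addListener(
            new Transformer.Listener() {
              @Override
              public void onFallbackApplied(
                  Composition composition,
                  TransformationRequest originalTransformationRequest,
                  TransformationRequest fallbackTransformationRequest) {
                if (originalTransformationRequest.hdrMode == Composition.HDR_MODE_KEEP_HDR
                    && fallbackTransformationRequest.hdrMode
                        == Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL) {
                  Log.i("DolbyVisionExport", "HDR editing unsupported; tone mapping to SDR.");
                }
              }
            })
        .build();
  }
}
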
View File

@@ -339,6 +339,12 @@ public final class DefaultDecoderFactory implements Codec.DecoderFactory {
}
}
// MediaFormat#KEY_COLOR_TRANSFER_REQUEST is available from API 31.
if (SDK_INT >= 31 && decoderInfos.get(0).codecMimeType.equals(MimeTypes.VIDEO_DOLBY_VISION)) {
// Ignore the dolby vision dynamic metadata.
mediaFormat.setInteger(
MediaFormat.KEY_COLOR_TRANSFER_REQUEST, MediaFormat.COLOR_TRANSFER_HLG);
}
List<ExportException> codecInitExceptions = new ArrayList<>();
DefaultCodec codec =
createCodecFromDecoderInfos(

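For context on the decoder-side change above: Dolby Vision profile 8 carries an HEVC base layer plus dynamic metadata, and setting MediaFormat#KEY_COLOR_TRANSFER_REQUEST (available from API 31) asks the decoder to drop that metadata and output plain HLG, so the rest of the pipeline can treat the frames as ordinary HLG video. A standalone sketch of the same configuration step, not part of this commit; the class name and the width/height parameters are placeholders:

import android.media.MediaFormat;
import android.os.Build;
import androidx.media3.common.MimeTypes;

public final class DolbyVisionDecoderConfigSketch {
  // Sketch: builds a decoder MediaFormat that requests HLG output from a
  // Dolby Vision decoder, matching the KEY_COLOR_TRANSFER_REQUEST usage above.
  public static MediaFormat buildDecoderFormat(int width, int height) {
    MediaFormat mediaFormat =
        MediaFormat.createVideoFormat(MimeTypes.VIDEO_DOLBY_VISION, width, height);
    if (Build.VERSION.SDK_INT >= 31) {
      mediaFormat.setInteger(
          MediaFormat.KEY_COLOR_TRANSFER_REQUEST, MediaFormat.COLOR_TRANSFER_HLG);
    }
    return mediaFormat;
  }
}
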
View File

@@ -112,18 +112,27 @@ public final class EncoderUtil {
@RequiresApi(33)
public static boolean isHdrEditingSupported(
MediaCodecInfo mediaCodecInfo, String mimeType, ColorInfo colorInfo) {
ImmutableList<Integer> allowedColorProfiles =
getCodecProfilesForHdrFormat(mimeType, colorInfo.colorTransfer);
boolean hasNeededHdrSupport =
boolean hasNeededHdrSupport = false;
// Some Dolby Vision encoders do not advertise FEATURE_HlgEditing but correctly support 10-bit
// input surface.
if (mimeType.equals(MimeTypes.VIDEO_DOLBY_VISION)) {
hasNeededHdrSupport = true;
} else {
hasNeededHdrSupport =
isFeatureSupported(
mediaCodecInfo, mimeType, MediaCodecInfo.CodecCapabilities.FEATURE_HdrEditing)
|| (colorInfo.colorTransfer == C.COLOR_TRANSFER_HLG
&& Util.SDK_INT >= 35
&& isFeatureSupported(
mediaCodecInfo, mimeType, MediaCodecInfo.CodecCapabilities.FEATURE_HlgEditing));
mediaCodecInfo,
mimeType,
MediaCodecInfo.CodecCapabilities.FEATURE_HlgEditing));
}
if (!hasNeededHdrSupport) {
return false;
}
ImmutableList<Integer> allowedColorProfiles =
getCodecProfilesForHdrFormat(mimeType, colorInfo.colorTransfer);
for (MediaCodecInfo.CodecProfileLevel codecProfileLevel :
mediaCodecInfo.getCapabilitiesForType(mimeType).profileLevels) {
if (allowedColorProfiles.contains(codecProfileLevel.profile)) {
@@ -172,6 +181,12 @@ public final class EncoderUtil {
return ImmutableList.of(MediaCodecInfo.CodecProfileLevel.AV1ProfileMain10HDR10);
}
break;
case MimeTypes.VIDEO_DOLBY_VISION:
if (colorTransfer == C.COLOR_TRANSFER_HLG) {
return ImmutableList.of(MediaCodecInfo.CodecProfileLevel.DolbyVisionProfileDvheSt);
}
// CodecProfileLevel does not support PQ for Dolby Vision.
break;
default:
break;
}

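Two things are worth noting in the EncoderUtil changes above: Dolby Vision encoders are treated as HDR-capable even when they do not advertise FEATURE_HlgEditing, and for HLG input the allowed profile is DolbyVisionProfileDvheSt (profile 8), since CodecProfileLevel defines no PQ mapping here. A sketch, not part of this commit, of a pre-flight check built on the getSupportedEncodersForHdrEditing helper that the updated tests call; the class and method names are placeholders, and the helper is expected to return an empty list whenever HDR editing is unavailable for the given MIME type:

import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.transformer.EncoderUtil;

public final class DolbyVisionCapabilityCheckSketch {
  // Sketch: decides whether HDR can be kept for this input, preferring a
  // Dolby Vision encoder and falling back to an H.265 HDR-editing check.
  public static boolean canKeepHdr(Format inputFormat) {
    ColorInfo colorInfo = inputFormat.colorInfo;
    if (colorInfo == null) {
      return false;
    }
    return !EncoderUtil.getSupportedEncodersForHdrEditing(MimeTypes.VIDEO_DOLBY_VISION, colorInfo)
            .isEmpty()
        || !EncoderUtil.getSupportedEncodersForHdrEditing(MimeTypes.VIDEO_H265, colorInfo)
            .isEmpty();
  }
}
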
View File

@@ -17,14 +17,19 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.CodecSpecificDataUtil.getCodecProfileAndLevel;
import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.common.util.Util.castNonNull;
import static java.lang.Integer.max;
import android.annotation.SuppressLint;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Pair;
import android.util.SparseArray;
import androidx.annotation.RequiresApi;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.Metadata;
@@ -130,6 +135,10 @@ import java.util.Locale;
if (isVideo) {
mediaFormat = MediaFormat.createVideoFormat(sampleMimeType, format.width, format.height);
MediaFormatUtil.maybeSetColorInfo(mediaFormat, format.colorInfo);
if (sampleMimeType.equals(MimeTypes.VIDEO_DOLBY_VISION) && SDK_INT >= 33) {
mediaFormat.setInteger(MediaFormat.KEY_PROFILE, getDvProfile());
mediaFormat.setInteger(MediaFormat.KEY_LEVEL, getDvLevel(format));
}
try {
mediaMuxer.setOrientationHint(format.rotationDegrees);
} catch (RuntimeException e) {
@@ -305,9 +314,78 @@ import java.util.Locale;
if (SDK_INT >= 24) {
supportedMimeTypes.add(MimeTypes.VIDEO_H265);
}
if (SDK_INT >= 33) {
supportedMimeTypes.add(MimeTypes.VIDEO_DOLBY_VISION);
}
if (SDK_INT >= 34) {
supportedMimeTypes.add(MimeTypes.VIDEO_AV1);
}
return supportedMimeTypes.build();
}
/**
* Get Dolby Vision profile.
*
* <p>Refer to <a
* href="https://professionalsupport.dolby.com/s/article/What-is-Dolby-Vision-Profile">Dolby
* Vision profiles and levels.</a>.
*/
@RequiresApi(33)
private static int getDvProfile() {
// Currently, only profile 8 is supported.
return MediaCodecInfo.CodecProfileLevel.DolbyVisionProfileDvheSt;
}
/**
* Get Dolby Vision level
*
* <p>Refer to <a
* href="https://professionalsupport.dolby.com/s/article/What-is-Dolby-Vision-Profile">What are
* Dolby Vision profiles and levels</a>.
*/
@RequiresApi(33)
private static int getDvLevel(Format format) {
if (format.codecs != null) {
Pair<Integer, Integer> profileAndLevel = getCodecProfileAndLevel(format);
return checkNotNull(profileAndLevel).second;
}
int maxWidthHeight = max(format.width, format.height);
checkState(maxWidthHeight <= 7680);
float pps = format.width * format.height * format.frameRate;
int level = -1;
if (maxWidthHeight <= 1_280) {
if (pps <= 22_118_400) {
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelHd24; // Level 01
} else { // pps <= 27_648_000
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelHd30; // Level 02
}
} else if (maxWidthHeight <= 1_920 && pps <= 49_766_400) {
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelFhd24; // Level 03
} else if (maxWidthHeight <= 2_560 && pps <= 62_208_000) {
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelFhd30; // Level 04
} else if (maxWidthHeight <= 3_840) {
if (pps <= 124_416_000) {
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelFhd60; // Level 05
} else if (pps <= 199_065_600) {
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelUhd24; // Level 06
} else if (pps <= 248_832_000) {
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelUhd30; // Level 07
} else if (pps <= 398_131_200) {
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelUhd48; // Level 08
} else if (pps <= 497_664_000) {
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelUhd60; // Level 09
} else { // pps <= 995_328_000
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevelUhd120; // Level 10
}
} else if (maxWidthHeight <= 7_680) {
if (pps <= 995_328_000) {
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevel8k30; // Level 11
} else { // pps <= 1_990_656_000
level = MediaCodecInfo.CodecProfileLevel.DolbyVisionLevel8k60; // Level 12
}
}
return level;
}
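As a worked example of the new level mapping: a 3840x2160 input at 30 frames per second with no codecs string gives maxWidthHeight = 3840 and pps = 3840 * 2160 * 30 = 248,832,000, which falls within the 248,832,000 bound, so getDvLevel returns DolbyVisionLevelUhd30 (Dolby Vision level 07); the muxer then writes that level next to the fixed DolbyVisionProfileDvheSt profile via MediaFormat#KEY_PROFILE and MediaFormat#KEY_LEVEL.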
}