Mirror of https://github.com/androidx/media.git
Calculate min timestamp across tracks in the Boxes.moov method
The Boxes.moov method can do the calculation itself instead of the caller doing it.

PiperOrigin-RevId: 679653033
commit b0b54ca018 (parent 4481b3567e)
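For readers skimming the diff below: the heart of the change is that Boxes.moov now derives the minimum presentation timestamp across tracks itself, returning C.TIME_UNSET when no track has written samples, instead of taking that value as a parameter. The following is a minimal, self-contained Java sketch of that calculation only; the Sample and Track records and the local TIME_UNSET constant are simplified stand-ins for the media3 muxer types, not the real classes.

import java.util.List;

final class MinTimestampSketch {

  // Stand-in for androidx.media3.common.C.TIME_UNSET (an "unset time" sentinel).
  static final long TIME_UNSET = Long.MIN_VALUE + 1;

  // Simplified stand-ins for the muxer's sample and track types.
  record Sample(long presentationTimeUs) {}

  record Track(List<Sample> writtenSamples) {}

  // Returns the smallest first-sample timestamp across all tracks, or TIME_UNSET when no track
  // has written any samples yet (mirrors the contract of the new private helper in the diff).
  static long findMinimumPresentationTimestampUsAcrossTracks(List<Track> tracks) {
    long minInputPtsUs = Long.MAX_VALUE;
    for (Track track : tracks) {
      if (!track.writtenSamples().isEmpty()) {
        minInputPtsUs = Math.min(track.writtenSamples().get(0).presentationTimeUs(), minInputPtsUs);
      }
    }
    return minInputPtsUs != Long.MAX_VALUE ? minInputPtsUs : TIME_UNSET;
  }

  public static void main(String[] args) {
    Track video = new Track(List.of(new Sample(0L), new Sample(33_333L)));
    Track audio = new Track(List.of(new Sample(2_000L)));
    Track empty = new Track(List.of());

    // Smallest first-sample timestamp wins: prints 0.
    System.out.println(findMinimumPresentationTimestampUsAcrossTracks(List.of(video, audio, empty)));
    // No samples anywhere: prints the TIME_UNSET sentinel.
    System.out.println(findMinimumPresentationTimestampUsAcrossTracks(List.of(empty)));
  }
}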
@@ -22,6 +22,7 @@ import static androidx.media3.muxer.ColorUtils.MEDIAFORMAT_STANDARD_TO_PRIMARIES
 import static androidx.media3.muxer.ColorUtils.MEDIAFORMAT_TRANSFER_TO_MP4_TRANSFER;
 import static androidx.media3.muxer.MuxerUtil.UNSIGNED_INT_MAX_VALUE;
 import static java.lang.Math.max;
+import static java.lang.Math.min;
 import static java.nio.charset.StandardCharsets.UTF_8;
 
 import android.media.MediaCodec;
@@ -118,7 +119,6 @@ import org.checkerframework.checker.nullness.qual.PolyNull;
   public static ByteBuffer moov(
       List<Track> tracks,
       MetadataCollector metadataCollector,
-      long minInputPtsUs,
      boolean isFragmentedMp4,
      @Mp4Muxer.LastSampleDurationBehavior int lastSampleDurationBehavior) {
     // The timestamp will always fit into a 32-bit integer. This is already validated in the
@@ -127,6 +127,15 @@ import org.checkerframework.checker.nullness.qual.PolyNull;
     int creationTimestampSeconds = (int) metadataCollector.timestampData.creationTimestampSeconds;
     int modificationTimestampSeconds =
         (int) metadataCollector.timestampData.modificationTimestampSeconds;
+    long minInputPtsUs = findMinimumPresentationTimestampUsAcrossTracks(tracks);
+
+    // For a non fragmented MP4 file, avoid writing an empty moov box.
+    // For a fragmented MP4 file, the minInputPtsUs gets ignored as the moov box is written without
+    // any sample info.
+    if (!isFragmentedMp4 && minInputPtsUs == C.TIME_UNSET) {
+      return ByteBuffer.allocate(0);
+    }
+
     List<ByteBuffer> trakBoxes = new ArrayList<>();
     List<ByteBuffer> trexBoxes = new ArrayList<>();
 
@@ -134,6 +143,7 @@ import org.checkerframework.checker.nullness.qual.PolyNull;
     long videoDurationUs = 0L;
     for (int i = 0; i < tracks.size(); i++) {
       Track track = tracks.get(i);
+      // For a non fragmented MP4 file, avoid writing an empty track.
      if (!isFragmentedMp4 && track.writtenSamples.isEmpty()) {
        continue;
      }
@@ -1830,4 +1840,15 @@ import org.checkerframework.checker.nullness.qual.PolyNull;
     return Util.scaleLargeValue(
         timestampUs, videoUnitTimebase, C.MICROS_PER_SECOND, RoundingMode.HALF_UP);
   }
+
+  private static long findMinimumPresentationTimestampUsAcrossTracks(List<Track> tracks) {
+    long minInputPtsUs = Long.MAX_VALUE;
+    for (int i = 0; i < tracks.size(); i++) {
+      Track track = tracks.get(i);
+      if (!track.writtenSamples.isEmpty()) {
+        minInputPtsUs = min(track.writtenSamples.get(0).presentationTimeUs, minInputPtsUs);
+      }
+    }
+    return minInputPtsUs != Long.MAX_VALUE ? minInputPtsUs : C.TIME_UNSET;
+  }
 }
@@ -202,14 +202,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   private void createHeader() throws IOException {
     output.position(0L);
     output.write(Boxes.ftyp());
-    // The minInputPtsUs is actually ignored as there are no pending samples to write.
     output.write(
         Boxes.moov(
-            tracks,
-            metadataCollector,
-            /* minInputPtsUs= */ 0L,
-            /* isFragmentedMp4= */ true,
-            lastSampleDurationBehavior));
+            tracks, metadataCollector, /* isFragmentedMp4= */ true, lastSampleDurationBehavior));
   }
 
   private boolean shouldFlushPendingSamples(
@@ -202,7 +202,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
         Boxes.moov(
             editableVideoTracks,
             editableVideoMetadataCollector,
-            findMinimumPresentationTimestampUsAcrossTracks(editableVideoTracks),
             /* isFragmentedMp4= */ false,
             lastSampleDurationBehavior);
     ByteBuffer edvdBoxHeader =
@@ -273,17 +272,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
     outputFileChannel.truncate(newMoovLocation + moovBytesNeeded);
   }
 
-  private static long findMinimumPresentationTimestampUsAcrossTracks(List<Track> tracks) {
-    long minInputPtsUs = Long.MAX_VALUE;
-    for (int i = 0; i < tracks.size(); i++) {
-      Track track = tracks.get(i);
-      if (!track.writtenSamples.isEmpty()) {
-        minInputPtsUs = min(track.writtenSamples.get(0).presentationTimeUs, minInputPtsUs);
-      }
-    }
-    return minInputPtsUs;
-  }
-
   private void writeHeader() throws IOException {
     outputFileChannel.position(0L);
     outputFileChannel.write(Boxes.ftyp());
@@ -311,24 +299,9 @@ import java.util.concurrent.atomic.AtomicBoolean;
   }
 
   private ByteBuffer assembleCurrentMoovData() {
-    // Recalculate the min timestamp every time, in case some new samples have smaller timestamps.
-    long minInputPtsUs = findMinimumPresentationTimestampUsAcrossTracks(tracks);
-
-    ByteBuffer moovHeader;
-    if (minInputPtsUs != Long.MAX_VALUE) {
-      moovHeader =
-          Boxes.moov(
-              tracks,
-              metadataCollector,
-              minInputPtsUs,
-              /* isFragmentedMp4= */ false,
-              lastSampleDurationBehavior);
-    } else {
-      // Skip moov box, if there are no samples.
-      moovHeader = ByteBuffer.allocate(0);
-    }
-
-    return moovHeader;
+    return Boxes.moov(
+        tracks, metadataCollector, /* isFragmentedMp4= */ false, lastSampleDurationBehavior);
   }
 
   /**
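One consequence worth noting from the hunks above: for a non-fragmented MP4 with no written samples, Boxes.moov now returns a zero-length ByteBuffer itself rather than relying on each caller to skip the box. Below is a small illustrative sketch (not media3 API; the class and method names are invented for illustration) of the resulting consumer-side pattern, where an empty buffer simply means "nothing to write yet".

import java.nio.ByteBuffer;

final class EmptyMoovSketch {

  // Writes the box only if it is non-empty; returns whether anything was "written".
  static boolean writeIfNonEmpty(ByteBuffer moovBox, StringBuilder fakeOutput) {
    if (!moovBox.hasRemaining()) {
      // Zero-length buffer: no track has any samples yet, so there is no moov to write.
      return false;
    }
    fakeOutput.append("moov[").append(moovBox.remaining()).append(" bytes]");
    return true;
  }

  public static void main(String[] args) {
    StringBuilder output = new StringBuilder();
    System.out.println(writeIfNonEmpty(ByteBuffer.allocate(0), output));   // false
    System.out.println(writeIfNonEmpty(ByteBuffer.allocate(512), output)); // true
    System.out.println(output); // moov[512 bytes]
  }
}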