mirror of
https://github.com/androidx/media.git
synced 2025-05-05 06:30:24 +08:00
Fix clipping in AudioSamplePipeline
When clipping a MediaItem with start time > 0, the audio was ending before the video. This is because: - Audio timestamps are computed based on the sample sizes, with a start time set to streamOffsetUs (i.e. the streamStartPositionUs is not taken into account). - The SamplePipeline was subtracting streamStartPositionUs from the timestamps before sending the samples to the muxer. - As a result, the audio timestamps were shifted by streamStartPositionUs, while they should be shifted by streamOffsetUs. PiperOrigin-RevId: 511175923
This commit is contained in:
parent
3009b4d5d1
commit
f4766ee4db
@ -60,7 +60,6 @@ import org.checkerframework.dataflow.qual.Pure;
|
|||||||
// TODO(b/260618558): Move silent audio generation upstream of this component.
|
// TODO(b/260618558): Move silent audio generation upstream of this component.
|
||||||
public AudioSamplePipeline(
|
public AudioSamplePipeline(
|
||||||
Format firstInputFormat,
|
Format firstInputFormat,
|
||||||
long streamStartPositionUs,
|
|
||||||
long streamOffsetUs,
|
long streamOffsetUs,
|
||||||
TransformationRequest transformationRequest,
|
TransformationRequest transformationRequest,
|
||||||
boolean flattenForSlowMotion,
|
boolean flattenForSlowMotion,
|
||||||
@ -69,7 +68,7 @@ import org.checkerframework.dataflow.qual.Pure;
|
|||||||
MuxerWrapper muxerWrapper,
|
MuxerWrapper muxerWrapper,
|
||||||
FallbackListener fallbackListener)
|
FallbackListener fallbackListener)
|
||||||
throws ExportException {
|
throws ExportException {
|
||||||
super(firstInputFormat, streamStartPositionUs, muxerWrapper);
|
super(firstInputFormat, /* streamStartPositionUs= */ streamOffsetUs, muxerWrapper);
|
||||||
|
|
||||||
silentAudioGenerator = new SilentAudioGenerator(firstInputFormat);
|
silentAudioGenerator = new SilentAudioGenerator(firstInputFormat);
|
||||||
availableInputBuffers = new ConcurrentLinkedDeque<>();
|
availableInputBuffers = new ConcurrentLinkedDeque<>();
|
||||||
|
@ -196,7 +196,7 @@ import java.util.concurrent.atomic.AtomicInteger;
|
|||||||
compositeAssetLoaderListener.onTrackAdded(
|
compositeAssetLoaderListener.onTrackAdded(
|
||||||
firstAudioFormat,
|
firstAudioFormat,
|
||||||
SUPPORTED_OUTPUT_TYPE_DECODED,
|
SUPPORTED_OUTPUT_TYPE_DECODED,
|
||||||
/* streamStartPositionUs= */ streamOffsetUs,
|
streamStartPositionUs,
|
||||||
streamOffsetUs));
|
streamOffsetUs));
|
||||||
sampleConsumersByTrackType.put(C.TRACK_TYPE_AUDIO, audioSampleConsumer);
|
sampleConsumersByTrackType.put(C.TRACK_TYPE_AUDIO, audioSampleConsumer);
|
||||||
}
|
}
|
||||||
|
@ -434,7 +434,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
|||||||
if (MimeTypes.isAudio(firstInputFormat.sampleMimeType)) {
|
if (MimeTypes.isAudio(firstInputFormat.sampleMimeType)) {
|
||||||
return new AudioSamplePipeline(
|
return new AudioSamplePipeline(
|
||||||
firstInputFormat,
|
firstInputFormat,
|
||||||
streamStartPositionUs,
|
|
||||||
streamOffsetUs,
|
streamOffsetUs,
|
||||||
transformationRequest,
|
transformationRequest,
|
||||||
firstEditedMediaItem.flattenForSlowMotion,
|
firstEditedMediaItem.flattenForSlowMotion,
|
||||||
|
Loading…
x
Reference in New Issue
Block a user