Add MediaItem change listener

PiperOrigin-RevId: 506868341
This commit is contained in:
kimvde 2023-02-03 12:27:45 +00:00 committed by microkatz
parent 77ae41354e
commit b5bb6e7619
8 changed files with 112 additions and 38 deletions

View File

@ -57,6 +57,8 @@ import org.checkerframework.dataflow.qual.Pure;
private long nextEncoderInputBufferTimeUs; private long nextEncoderInputBufferTimeUs;
private long encoderBufferDurationRemainder; private long encoderBufferDurationRemainder;
private volatile long mediaItemOffsetUs;
// TODO(b/260618558): Move silent audio generation upstream of this component. // TODO(b/260618558): Move silent audio generation upstream of this component.
public AudioSamplePipeline( public AudioSamplePipeline(
Format inputFormat, Format inputFormat,
@ -151,6 +153,12 @@ import org.checkerframework.dataflow.qual.Pure;
nextEncoderInputBufferTimeUs = streamOffsetUs; nextEncoderInputBufferTimeUs = streamOffsetUs;
} }
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
this.mediaItemOffsetUs = mediaItemOffsetUs;
}
@Override @Override
@Nullable @Nullable
public DecoderInputBuffer getInputBuffer() { public DecoderInputBuffer getInputBuffer() {
@ -159,7 +167,9 @@ import org.checkerframework.dataflow.qual.Pure;
@Override @Override
public void queueInputBuffer() { public void queueInputBuffer() {
pendingInputBuffers.add(availableInputBuffers.remove()); DecoderInputBuffer inputBuffer = availableInputBuffers.remove();
inputBuffer.timeUs += mediaItemOffsetUs;
pendingInputBuffers.add(inputBuffer);
} }
@Override @Override

View File

@ -15,6 +15,7 @@
*/ */
package androidx.media3.transformer; package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull; import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE; import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
@ -26,6 +27,7 @@ import androidx.annotation.Nullable;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Clock; import androidx.media3.common.util.Clock;
import androidx.media3.common.util.HandlerWrapper; import androidx.media3.common.util.HandlerWrapper;
@ -49,6 +51,7 @@ import java.util.concurrent.atomic.AtomicLong;
private final HandlerWrapper handler; private final HandlerWrapper handler;
private final Listener compositeAssetLoaderListener; private final Listener compositeAssetLoaderListener;
private final Map<Integer, SampleConsumer> sampleConsumersByTrackType; private final Map<Integer, SampleConsumer> sampleConsumersByTrackType;
private final Map<Integer, OnMediaItemChangedListener> mediaItemChangedListenersByTrackType;
private final AtomicLong totalDurationUs; private final AtomicLong totalDurationUs;
private final AtomicInteger nonEndedTracks; private final AtomicInteger nonEndedTracks;
@ -68,6 +71,7 @@ import java.util.concurrent.atomic.AtomicLong;
currentMediaItemIndex = new AtomicInteger(); currentMediaItemIndex = new AtomicInteger();
handler = clock.createHandler(looper, /* callback= */ null); handler = clock.createHandler(looper, /* callback= */ null);
sampleConsumersByTrackType = new HashMap<>(); sampleConsumersByTrackType = new HashMap<>();
mediaItemChangedListenersByTrackType = new HashMap<>();
totalDurationUs = new AtomicLong(); totalDurationUs = new AtomicLong();
nonEndedTracks = new AtomicInteger(); nonEndedTracks = new AtomicInteger();
// It's safe to use "this" because we don't start the AssetLoader before exiting the // It's safe to use "this" because we don't start the AssetLoader before exiting the
@ -110,6 +114,21 @@ import java.util.concurrent.atomic.AtomicLong;
currentAssetLoader.release(); currentAssetLoader.release();
} }
/**
* Adds an {@link OnMediaItemChangedListener} for the given track type.
*
* <p>There can't be more than one {@link OnMediaItemChangedListener} for the same track type.
*
* @param onMediaItemChangedListener The {@link OnMediaItemChangedListener}.
* @param trackType The {@link C.TrackType} for which to listen to {@link MediaItem} change
* events.
*/
public void addOnMediaItemChangedListener(
OnMediaItemChangedListener onMediaItemChangedListener, @C.TrackType int trackType) {
checkArgument(mediaItemChangedListenersByTrackType.get(trackType) == null);
mediaItemChangedListenersByTrackType.put(trackType, onMediaItemChangedListener);
}
// AssetLoader.Listener implementation. // AssetLoader.Listener implementation.
@Override @Override
@ -144,17 +163,29 @@ import java.util.concurrent.atomic.AtomicLong;
long streamOffsetUs) long streamOffsetUs)
throws TransformationException { throws TransformationException {
int trackType = MimeTypes.getTrackType(format.sampleMimeType); int trackType = MimeTypes.getTrackType(format.sampleMimeType);
SampleConsumer sampleConsumer;
if (currentMediaItemIndex.get() == 0) { if (currentMediaItemIndex.get() == 0) {
SampleConsumer sampleConsumer = sampleConsumer =
new SampleConsumerWrapper( new SampleConsumerWrapper(
compositeAssetLoaderListener.onTrackAdded( compositeAssetLoaderListener.onTrackAdded(
format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs)); format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs));
sampleConsumersByTrackType.put(trackType, sampleConsumer); sampleConsumersByTrackType.put(trackType, sampleConsumer);
return sampleConsumer; } else {
sampleConsumer =
checkStateNotNull(
sampleConsumersByTrackType.get(trackType),
"The preceding MediaItem does not contain any track of type " + trackType);
} }
return checkStateNotNull( @Nullable
sampleConsumersByTrackType.get(trackType), OnMediaItemChangedListener onMediaItemChangedListener =
"The preceding MediaItem does not contain any track of type " + trackType); mediaItemChangedListenersByTrackType.get(trackType);
if (onMediaItemChangedListener != null) {
onMediaItemChangedListener.onMediaItemChanged(
editedMediaItems.get(currentMediaItemIndex.get()),
format,
/* mediaItemOffsetUs= */ totalDurationUs.get());
}
return sampleConsumer;
} }
@Override @Override
@ -200,7 +231,6 @@ import java.util.concurrent.atomic.AtomicLong;
return; return;
} }
} }
inputBuffer.timeUs += totalDurationUs.get();
sampleConsumer.queueInputBuffer(); sampleConsumer.queueInputBuffer();
} }
@ -219,11 +249,6 @@ import java.util.concurrent.atomic.AtomicLong;
return sampleConsumer.getPendingVideoFrameCount(); return sampleConsumer.getPendingVideoFrameCount();
} }
@Override
public void setVideoOffsetToAddUs(long offsetToAddUs) {
sampleConsumer.setVideoOffsetToAddUs(offsetToAddUs);
}
@Override @Override
public void registerVideoFrame() { public void registerVideoFrame() {
sampleConsumer.registerVideoFrame(); sampleConsumer.registerVideoFrame();
@ -235,7 +260,6 @@ import java.util.concurrent.atomic.AtomicLong;
if (currentMediaItemIndex.get() < editedMediaItems.size() - 1) { if (currentMediaItemIndex.get() < editedMediaItems.size() - 1) {
if (nonEndedTracks.get() == 0) { if (nonEndedTracks.get() == 0) {
switchAssetLoader(); switchAssetLoader();
sampleConsumer.setVideoOffsetToAddUs(totalDurationUs.get());
} }
return; return;
} }

View File

@ -35,6 +35,7 @@ import java.util.concurrent.ConcurrentLinkedDeque;
private final Queue<DecoderInputBuffer> availableInputBuffers; private final Queue<DecoderInputBuffer> availableInputBuffers;
private final Queue<DecoderInputBuffer> pendingInputBuffers; private final Queue<DecoderInputBuffer> pendingInputBuffers;
private volatile long mediaItemOffsetUs;
private volatile boolean inputEnded; private volatile boolean inputEnded;
public EncodedSamplePipeline( public EncodedSamplePipeline(
@ -56,6 +57,12 @@ import java.util.concurrent.ConcurrentLinkedDeque;
fallbackListener.onTransformationRequestFinalized(transformationRequest); fallbackListener.onTransformationRequestFinalized(transformationRequest);
} }
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
this.mediaItemOffsetUs = mediaItemOffsetUs;
}
@Override @Override
public boolean expectsDecodedData() { public boolean expectsDecodedData() {
return false; return false;
@ -73,6 +80,7 @@ import java.util.concurrent.ConcurrentLinkedDeque;
if (inputBuffer.isEndOfStream()) { if (inputBuffer.isEndOfStream()) {
inputEnded = true; inputEnded = true;
} else { } else {
inputBuffer.timeUs += mediaItemOffsetUs;
pendingInputBuffers.add(inputBuffer); pendingInputBuffers.add(inputBuffer);
} }
} }

View File

@ -0,0 +1,38 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
/** A listener for {@link MediaItem} changes in the {@linkplain SamplePipeline sample pipelines}. */
/* package */ interface OnMediaItemChangedListener {
/**
* Called when the {@link MediaItem} whose samples are passed to the {@link SamplePipeline}
* changes.
*
* <p>Can be called from any thread.
*
* @param editedMediaItem The {@link EditedMediaItem} describing the new {@link MediaItem} and the
*     transformations to apply to it.
* @param trackFormat The {@link Format} of the {@link EditedMediaItem} track corresponding to the
*     {@link SamplePipeline}.
* @param mediaItemOffsetUs The offset to add to the presentation timestamps of the {@link
*     EditedMediaItem} samples received by the {@link SamplePipeline}, in microseconds.
*/
void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs);
}

View File

@ -111,15 +111,6 @@ public interface SampleConsumer {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
/**
* Sets the offset to add to the video timestamps, in microseconds.
*
* <p>Should only be used for raw video data.
*/
default void setVideoOffsetToAddUs(long offsetToAddUs) {
throw new UnsupportedOperationException();
}
/** /**
* Informs the consumer that a frame will be queued to the {@linkplain #getInputSurface() input * Informs the consumer that a frame will be queued to the {@linkplain #getInputSurface() input
* surface}. * surface}.

View File

@ -29,7 +29,7 @@ import androidx.media3.decoder.DecoderInputBuffer;
* *
* <p>This pipeline can be used to implement transformations of audio or video samples. * <p>This pipeline can be used to implement transformations of audio or video samples.
*/ */
/* package */ abstract class SamplePipeline implements SampleConsumer { /* package */ abstract class SamplePipeline implements SampleConsumer, OnMediaItemChangedListener {
private final long streamStartPositionUs; private final long streamStartPositionUs;
private final MuxerWrapper muxerWrapper; private final MuxerWrapper muxerWrapper;

View File

@ -96,7 +96,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final Clock clock; private final Clock clock;
private final HandlerThread internalHandlerThread; private final HandlerThread internalHandlerThread;
private final HandlerWrapper internalHandler; private final HandlerWrapper internalHandler;
private final AssetLoader assetLoader; private final CompositeAssetLoader compositeAssetLoader;
private final List<SamplePipeline> samplePipelines; private final List<SamplePipeline> samplePipelines;
private final MuxerWrapper muxerWrapper; private final MuxerWrapper muxerWrapper;
private final ConditionVariable transformerConditionVariable; private final ConditionVariable transformerConditionVariable;
@ -136,7 +136,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
Looper internalLooper = internalHandlerThread.getLooper(); Looper internalLooper = internalHandlerThread.getLooper();
EditedMediaItemSequence sequence = composition.sequences.get(0); EditedMediaItemSequence sequence = composition.sequences.get(0);
ComponentListener componentListener = new ComponentListener(sequence, fallbackListener); ComponentListener componentListener = new ComponentListener(sequence, fallbackListener);
assetLoader = compositeAssetLoader =
new CompositeAssetLoader( new CompositeAssetLoader(
sequence, assetLoaderFactory, internalLooper, componentListener, clock); sequence, assetLoaderFactory, internalLooper, componentListener, clock);
samplePipelines = new ArrayList<>(); samplePipelines = new ArrayList<>();
@ -220,7 +220,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
private void startInternal() { private void startInternal() {
assetLoader.start(); compositeAssetLoader.start();
} }
private void registerSamplePipelineInternal(SamplePipeline samplePipeline) { private void registerSamplePipelineInternal(SamplePipeline samplePipeline) {
@ -243,7 +243,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private void endInternal( private void endInternal(
@EndReason int endReason, @Nullable TransformationException transformationException) { @EndReason int endReason, @Nullable TransformationException transformationException) {
ImmutableMap<Integer, String> decoderNames = assetLoader.getDecoderNames(); ImmutableMap<Integer, String> decoderNames = compositeAssetLoader.getDecoderNames();
transformationResultBuilder transformationResultBuilder
.setAudioDecoderName(decoderNames.get(C.TRACK_TYPE_AUDIO)) .setAudioDecoderName(decoderNames.get(C.TRACK_TYPE_AUDIO))
.setVideoDecoderName(decoderNames.get(C.TRACK_TYPE_VIDEO)) .setVideoDecoderName(decoderNames.get(C.TRACK_TYPE_VIDEO))
@ -257,7 +257,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
try { try {
try { try {
assetLoader.release(); compositeAssetLoader.release();
} finally { } finally {
try { try {
for (int i = 0; i < samplePipelines.size(); i++) { for (int i = 0; i < samplePipelines.size(); i++) {
@ -309,7 +309,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
private void updateProgressInternal(ProgressHolder progressHolder) { private void updateProgressInternal(ProgressHolder progressHolder) {
progressState = assetLoader.getProgress(progressHolder); progressState = compositeAssetLoader.getProgress(progressHolder);
transformerConditionVariable.open(); transformerConditionVariable.open();
} }
@ -366,10 +366,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
long streamStartPositionUs, long streamStartPositionUs,
long streamOffsetUs) long streamOffsetUs)
throws TransformationException { throws TransformationException {
int trackType = MimeTypes.getTrackType(format.sampleMimeType);
if (!trackAdded) { if (!trackAdded) {
if (generateSilentAudio) { if (generateSilentAudio) {
if (this.trackCount.get() == 1 && MimeTypes.isVideo(format.sampleMimeType)) { if (trackCount.get() == 1 && trackType == C.TRACK_TYPE_VIDEO) {
this.trackCount.incrementAndGet(); trackCount.incrementAndGet();
} else { } else {
generateSilentAudio = false; generateSilentAudio = false;
} }
@ -384,6 +385,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
SamplePipeline samplePipeline = SamplePipeline samplePipeline =
getSamplePipeline(format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs); getSamplePipeline(format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
compositeAssetLoader.addOnMediaItemChangedListener(samplePipeline, trackType);
internalHandler.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, samplePipeline).sendToTarget(); internalHandler.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, samplePipeline).sendToTarget();
if (generateSilentAudio) { if (generateSilentAudio) {
@ -399,6 +401,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
SUPPORTED_OUTPUT_TYPE_DECODED, SUPPORTED_OUTPUT_TYPE_DECODED,
streamStartPositionUs, streamStartPositionUs,
streamOffsetUs); streamOffsetUs);
compositeAssetLoader.addOnMediaItemChangedListener(audioSamplePipeline, C.TRACK_TYPE_AUDIO);
internalHandler internalHandler
.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, audioSamplePipeline) .obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, audioSamplePipeline)
.sendToTarget(); .sendToTarget();

View File

@ -209,7 +209,13 @@ import org.checkerframework.dataflow.qual.Pure;
.setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio) .setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
.setStreamOffsetUs(streamOffsetUs) .setStreamOffsetUs(streamOffsetUs)
.build(); .build();
frameProcessor.setInputFrameInfo(firstFrameInfo); }
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
frameProcessor.setInputFrameInfo(
new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build());
} }
@Override @Override
@ -222,12 +228,6 @@ import org.checkerframework.dataflow.qual.Pure;
return frameProcessorInputColor; return frameProcessorInputColor;
} }
@Override
public void setVideoOffsetToAddUs(long offsetToAddUs) {
frameProcessor.setInputFrameInfo(
new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(offsetToAddUs).build());
}
@Override @Override
public void registerVideoFrame() { public void registerVideoFrame() {
frameProcessor.registerInputFrame(); frameProcessor.registerInputFrame();