Add MediaItem change listener

PiperOrigin-RevId: 506868341
kimvde 2023-02-03 12:27:45 +00:00 committed by microkatz
parent 77ae41354e
commit b5bb6e7619
8 changed files with 112 additions and 38 deletions

View File

@@ -57,6 +57,8 @@ import org.checkerframework.dataflow.qual.Pure;
private long nextEncoderInputBufferTimeUs;
private long encoderBufferDurationRemainder;
private volatile long mediaItemOffsetUs;
// TODO(b/260618558): Move silent audio generation upstream of this component.
public AudioSamplePipeline(
Format inputFormat,
@@ -151,6 +153,12 @@ import org.checkerframework.dataflow.qual.Pure;
nextEncoderInputBufferTimeUs = streamOffsetUs;
}
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
this.mediaItemOffsetUs = mediaItemOffsetUs;
}
@Override
@Nullable
public DecoderInputBuffer getInputBuffer() {
@@ -159,7 +167,9 @@ import org.checkerframework.dataflow.qual.Pure;
@Override
public void queueInputBuffer() {
pendingInputBuffers.add(availableInputBuffers.remove());
DecoderInputBuffer inputBuffer = availableInputBuffers.remove();
inputBuffer.timeUs += mediaItemOffsetUs;
pendingInputBuffers.add(inputBuffer);
}
@Override
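The new onMediaItemChanged override above stores the reported offset, and queueInputBuffer adds it to each buffer timestamp. A minimal standalone sketch of that arithmetic, with hypothetical values (it assumes a 5 second first MediaItem and is not part of this commit):

// Illustration only: a buffer timestamp from the second MediaItem is moved onto the
// composition timeline by the offset reported through onMediaItemChanged().
final class MediaItemOffsetArithmetic {
  public static void main(String[] args) {
    long mediaItemOffsetUs = 5_000_000; // hypothetical duration of the first MediaItem
    long bufferTimeUs = 20_000; // timestamp of a sample within the second MediaItem
    long shiftedTimeUs = bufferTimeUs + mediaItemOffsetUs;
    System.out.println(shiftedTimeUs); // 5020000, so playback continues right after the first item
  }
}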

View File

@@ -15,6 +15,7 @@
*/
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
@@ -26,6 +27,7 @@ import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.HandlerWrapper;
@@ -49,6 +51,7 @@ import java.util.concurrent.atomic.AtomicLong;
private final HandlerWrapper handler;
private final Listener compositeAssetLoaderListener;
private final Map<Integer, SampleConsumer> sampleConsumersByTrackType;
private final Map<Integer, OnMediaItemChangedListener> mediaItemChangedListenersByTrackType;
private final AtomicLong totalDurationUs;
private final AtomicInteger nonEndedTracks;
@@ -68,6 +71,7 @@ import java.util.concurrent.atomic.AtomicLong;
currentMediaItemIndex = new AtomicInteger();
handler = clock.createHandler(looper, /* callback= */ null);
sampleConsumersByTrackType = new HashMap<>();
mediaItemChangedListenersByTrackType = new HashMap<>();
totalDurationUs = new AtomicLong();
nonEndedTracks = new AtomicInteger();
// It's safe to use "this" because we don't start the AssetLoader before exiting the
@@ -110,6 +114,21 @@ import java.util.concurrent.atomic.AtomicLong;
currentAssetLoader.release();
}
/**
* Adds an {@link OnMediaItemChangedListener} for the given track type.
*
* <p>There can't be more than one {@link OnMediaItemChangedListener} for the same track type.
*
* @param onMediaItemChangedListener The {@link OnMediaItemChangedListener}.
* @param trackType The {@link C.TrackType} for which to listen to {@link MediaItem} change
* events.
*/
public void addOnMediaItemChangedListener(
OnMediaItemChangedListener onMediaItemChangedListener, @C.TrackType int trackType) {
checkArgument(mediaItemChangedListenersByTrackType.get(trackType) == null);
mediaItemChangedListenersByTrackType.put(trackType, onMediaItemChangedListener);
}
// AssetLoader.Listener implementation.
@Override
@@ -144,17 +163,29 @@ import java.util.concurrent.atomic.AtomicLong;
long streamOffsetUs)
throws TransformationException {
int trackType = MimeTypes.getTrackType(format.sampleMimeType);
SampleConsumer sampleConsumer;
if (currentMediaItemIndex.get() == 0) {
SampleConsumer sampleConsumer =
sampleConsumer =
new SampleConsumerWrapper(
compositeAssetLoaderListener.onTrackAdded(
format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs));
sampleConsumersByTrackType.put(trackType, sampleConsumer);
return sampleConsumer;
} else {
sampleConsumer =
checkStateNotNull(
sampleConsumersByTrackType.get(trackType),
"The preceding MediaItem does not contain any track of type " + trackType);
}
return checkStateNotNull(
sampleConsumersByTrackType.get(trackType),
"The preceding MediaItem does not contain any track of type " + trackType);
@Nullable
OnMediaItemChangedListener onMediaItemChangedListener =
mediaItemChangedListenersByTrackType.get(trackType);
if (onMediaItemChangedListener != null) {
onMediaItemChangedListener.onMediaItemChanged(
editedMediaItems.get(currentMediaItemIndex.get()),
format,
/* mediaItemOffsetUs= */ totalDurationUs.get());
}
return sampleConsumer;
}
@Override
@@ -200,7 +231,6 @@ import java.util.concurrent.atomic.AtomicLong;
return;
}
}
inputBuffer.timeUs += totalDurationUs.get();
sampleConsumer.queueInputBuffer();
}
@@ -219,11 +249,6 @@ import java.util.concurrent.atomic.AtomicLong;
return sampleConsumer.getPendingVideoFrameCount();
}
@Override
public void setVideoOffsetToAddUs(long offsetToAddUs) {
sampleConsumer.setVideoOffsetToAddUs(offsetToAddUs);
}
@Override
public void registerVideoFrame() {
sampleConsumer.registerVideoFrame();
@@ -235,7 +260,6 @@ import java.util.concurrent.atomic.AtomicLong;
if (currentMediaItemIndex.get() < editedMediaItems.size() - 1) {
if (nonEndedTracks.get() == 0) {
switchAssetLoader();
sampleConsumer.setVideoOffsetToAddUs(totalDurationUs.get());
}
return;
}
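The addOnMediaItemChangedListener method above accepts at most one listener per track type, and onTrackAdded notifies the registered listener with the accumulated duration of the preceding MediaItems. A short sketch of how a caller might register such a listener, assuming an existing CompositeAssetLoader instance and using a lambda; the variable names are hypothetical, and the actual registration performed by the transformer appears in a later file of this commit:

// Sketch only, not part of the commit; assumes code in the androidx.media3.transformer package.
OnMediaItemChangedListener audioOffsetListener =
    (editedMediaItem, trackFormat, mediaItemOffsetUs) -> {
      // Store the offset so later audio buffers can be shifted onto the composition timeline.
    };
compositeAssetLoader.addOnMediaItemChangedListener(audioOffsetListener, C.TRACK_TYPE_AUDIO);
// A second registration for C.TRACK_TYPE_AUDIO would fail the checkArgument in the method above.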

View File

@@ -35,6 +35,7 @@ import java.util.concurrent.ConcurrentLinkedDeque;
private final Queue<DecoderInputBuffer> availableInputBuffers;
private final Queue<DecoderInputBuffer> pendingInputBuffers;
private volatile long mediaItemOffsetUs;
private volatile boolean inputEnded;
public EncodedSamplePipeline(
@@ -56,6 +57,12 @@ import java.util.concurrent.ConcurrentLinkedDeque;
fallbackListener.onTransformationRequestFinalized(transformationRequest);
}
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
this.mediaItemOffsetUs = mediaItemOffsetUs;
}
@Override
public boolean expectsDecodedData() {
return false;
@@ -73,6 +80,7 @@ import java.util.concurrent.ConcurrentLinkedDeque;
if (inputBuffer.isEndOfStream()) {
inputEnded = true;
} else {
inputBuffer.timeUs += mediaItemOffsetUs;
pendingInputBuffers.add(inputBuffer);
}
}

View File

@@ -0,0 +1,38 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
/** A listener for {@link MediaItem} changes in the {@linkplain SamplePipeline sample pipelines}. */
/* package */ interface OnMediaItemChangedListener {
/**
* Called when the {@link MediaItem} whose samples are passed to the {@link SamplePipeline}
* changes.
*
* <p>Can be called from any thread.
*
* @param editedMediaItem The {@link MediaItem} with the transformations to apply to it.
* @param trackFormat The {@link Format} of the {@link EditedMediaItem} track corresponding to the
* {@link SamplePipeline}.
* @param mediaItemOffsetUs The offset to add to the presentation timestamps of the {@link
* EditedMediaItem} samples received by the {@link SamplePipeline}, in microseconds.
*/
void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs);
}
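A minimal sketch of an implementation of this interface; the class name and accessor are hypothetical, and the concrete listeners in this commit are the sample pipelines themselves. The field is volatile because, as documented above, the callback can arrive on any thread:

package androidx.media3.transformer;

import androidx.media3.common.Format;

// Hypothetical example for illustration only; not part of this commit.
/* package */ final class MediaItemOffsetTracker implements OnMediaItemChangedListener {

  // Volatile because onMediaItemChanged can be called from any thread.
  private volatile long mediaItemOffsetUs;

  @Override
  public void onMediaItemChanged(
      EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
    this.mediaItemOffsetUs = mediaItemOffsetUs;
  }

  /** Returns the offset to add to the current samples' timestamps, in microseconds. */
  public long getMediaItemOffsetUs() {
    return mediaItemOffsetUs;
  }
}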

View File

@@ -111,15 +111,6 @@ public interface SampleConsumer {
throw new UnsupportedOperationException();
}
/**
* Sets the offset to add to the video timestamps, in microseconds.
*
* <p>Should only be used for raw video data.
*/
default void setVideoOffsetToAddUs(long offsetToAddUs) {
throw new UnsupportedOperationException();
}
/**
* Informs the consumer that a frame will be queued to the {@linkplain #getInputSurface() input
* surface}.

View File

@@ -29,7 +29,7 @@ import androidx.media3.decoder.DecoderInputBuffer;
*
* <p>This pipeline can be used to implement transformations of audio or video samples.
*/
/* package */ abstract class SamplePipeline implements SampleConsumer {
/* package */ abstract class SamplePipeline implements SampleConsumer, OnMediaItemChangedListener {
private final long streamStartPositionUs;
private final MuxerWrapper muxerWrapper;

View File

@@ -96,7 +96,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final Clock clock;
private final HandlerThread internalHandlerThread;
private final HandlerWrapper internalHandler;
private final AssetLoader assetLoader;
private final CompositeAssetLoader compositeAssetLoader;
private final List<SamplePipeline> samplePipelines;
private final MuxerWrapper muxerWrapper;
private final ConditionVariable transformerConditionVariable;
@@ -136,7 +136,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
Looper internalLooper = internalHandlerThread.getLooper();
EditedMediaItemSequence sequence = composition.sequences.get(0);
ComponentListener componentListener = new ComponentListener(sequence, fallbackListener);
assetLoader =
compositeAssetLoader =
new CompositeAssetLoader(
sequence, assetLoaderFactory, internalLooper, componentListener, clock);
samplePipelines = new ArrayList<>();
@@ -220,7 +220,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private void startInternal() {
assetLoader.start();
compositeAssetLoader.start();
}
private void registerSamplePipelineInternal(SamplePipeline samplePipeline) {
@@ -243,7 +243,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private void endInternal(
@EndReason int endReason, @Nullable TransformationException transformationException) {
ImmutableMap<Integer, String> decoderNames = assetLoader.getDecoderNames();
ImmutableMap<Integer, String> decoderNames = compositeAssetLoader.getDecoderNames();
transformationResultBuilder
.setAudioDecoderName(decoderNames.get(C.TRACK_TYPE_AUDIO))
.setVideoDecoderName(decoderNames.get(C.TRACK_TYPE_VIDEO))
@@ -257,7 +257,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
try {
try {
assetLoader.release();
compositeAssetLoader.release();
} finally {
try {
for (int i = 0; i < samplePipelines.size(); i++) {
@@ -309,7 +309,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private void updateProgressInternal(ProgressHolder progressHolder) {
progressState = assetLoader.getProgress(progressHolder);
progressState = compositeAssetLoader.getProgress(progressHolder);
transformerConditionVariable.open();
}
@@ -366,10 +366,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
long streamStartPositionUs,
long streamOffsetUs)
throws TransformationException {
int trackType = MimeTypes.getTrackType(format.sampleMimeType);
if (!trackAdded) {
if (generateSilentAudio) {
if (this.trackCount.get() == 1 && MimeTypes.isVideo(format.sampleMimeType)) {
this.trackCount.incrementAndGet();
if (trackCount.get() == 1 && trackType == C.TRACK_TYPE_VIDEO) {
trackCount.incrementAndGet();
} else {
generateSilentAudio = false;
}
@@ -384,6 +385,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
SamplePipeline samplePipeline =
getSamplePipeline(format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
compositeAssetLoader.addOnMediaItemChangedListener(samplePipeline, trackType);
internalHandler.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, samplePipeline).sendToTarget();
if (generateSilentAudio) {
@@ -399,6 +401,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
SUPPORTED_OUTPUT_TYPE_DECODED,
streamStartPositionUs,
streamOffsetUs);
compositeAssetLoader.addOnMediaItemChangedListener(audioSamplePipeline, C.TRACK_TYPE_AUDIO);
internalHandler
.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, audioSamplePipeline)
.sendToTarget();

View File

@@ -209,7 +209,13 @@ import org.checkerframework.dataflow.qual.Pure;
.setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
.setStreamOffsetUs(streamOffsetUs)
.build();
frameProcessor.setInputFrameInfo(firstFrameInfo);
}
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
frameProcessor.setInputFrameInfo(
new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build());
}
@Override
@@ -222,12 +228,6 @@ import org.checkerframework.dataflow.qual.Pure;
return frameProcessorInputColor;
}
@Override
public void setVideoOffsetToAddUs(long offsetToAddUs) {
frameProcessor.setInputFrameInfo(
new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(offsetToAddUs).build());
}
@Override
public void registerVideoFrame() {
frameProcessor.registerInputFrame();
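On the video side, the offset is applied through the frame processor's input frame info rather than per buffer, replacing the removed setVideoOffsetToAddUs path. A minimal sketch of the same call with a hypothetical offset value, reusing the frameProcessor and firstFrameInfo fields shown above:

// Sketch only, not part of the commit: when the next MediaItem starts, rebuild the frame info
// so that frames registered from now on are shifted onto the composition timeline.
long mediaItemOffsetUs = 5_000_000; // hypothetical duration of the preceding MediaItem
frameProcessor.setInputFrameInfo(
    new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build());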