Move video decoding to AssetLoader

PiperOrigin-RevId: 499454273
Authored by kimvde on 2023-01-04 12:46:09 +00:00; committed by Marc Baechinger
parent ef016832b2
commit d4491427de
12 changed files with 340 additions and 212 deletions

View File

@@ -120,7 +120,11 @@ public interface FrameProcessor {
   /** Indicates the frame should be dropped after {@link #releaseOutputFrame(long)} is invoked. */
   long DROP_OUTPUT_FRAME = -2;
 
-  /** Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from. */
+  /**
+   * Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from.
+   *
+   * <p>Can be called on any thread.
+   */
   Surface getInputSurface();
 
   /**
@@ -142,6 +146,8 @@ public interface FrameProcessor {
    *
    * <p>Must be called before rendering a frame to the frame processor's input surface.
    *
+   * <p>Can be called on any thread.
+   *
    * @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link
    *     #setInputFrameInfo(FrameInfo)}.
    */
@@ -150,6 +156,8 @@ public interface FrameProcessor {
   /**
    * Returns the number of input frames that have been {@linkplain #registerInputFrame() registered}
    * but not processed off the {@linkplain #getInputSurface() input surface} yet.
+   *
+   * <p>Can be called on any thread.
    */
   int getPendingInputFrameCount();
 
@@ -194,6 +202,8 @@ public interface FrameProcessor {
   /**
    * Informs the {@code FrameProcessor} that no further input frames should be accepted.
    *
+   * <p>Can be called on any thread.
+   *
    * @throws IllegalStateException If called more than once.
    */
   void signalEndOfInput();
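The new "Can be called on any thread" guarantees are what allow an asset loader running on its own thread to drive the frame processor directly. A minimal sketch of a producer thread relying on this contract (frameProcessor is an existing FrameProcessor; hasMoreFrames() and renderFrameTo() are hypothetical helpers, not part of the API):

    Thread producerThread =
        new Thread(
            () -> {
              Surface inputSurface = frameProcessor.getInputSurface();
              while (hasMoreFrames()) {
                // registerInputFrame() must be called before rendering each frame.
                frameProcessor.registerInputFrame();
                renderFrameTo(inputSurface);
              }
              // Signal once, after the last frame has been rendered.
              frameProcessor.signalEndOfInput();
            });
    producerThread.start();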

View File

@@ -334,14 +334,15 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
   private final FinalMatrixTextureProcessorWrapper finalTextureProcessorWrapper;
   private final ImmutableList<GlTextureProcessor> allTextureProcessors;
 
-  private @MonotonicNonNull FrameInfo nextInputFrameInfo;
-  private boolean inputStreamEnded;
   /**
    * Offset compared to original media presentation time that has been added to incoming frame
    * timestamps, in microseconds.
    */
   private long previousStreamOffsetUs;
 
+  private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
+  private volatile boolean inputStreamEnded;
+
   private GlEffectsFrameProcessor(
       EGLDisplay eglDisplay,
       EGLContext eglContext,

View File

@ -149,12 +149,11 @@ public interface AssetLoader {
* streamOffsetUs}), in microseconds. * streamOffsetUs}), in microseconds.
* @param streamOffsetUs The offset that will be added to the timestamps to make sure they are * @param streamOffsetUs The offset that will be added to the timestamps to make sure they are
* non-negative, in microseconds. * non-negative, in microseconds.
* @return The {@link SamplePipeline.Input} describing the type of sample data expected, and to * @return The {@link SampleConsumer} describing the type of sample data expected, and to which
* which to pass this data. * to pass this data.
* @throws TransformationException If an error occurs configuring the {@link * @throws TransformationException If an error occurs configuring the {@link SampleConsumer}.
* SamplePipeline.Input}.
*/ */
SamplePipeline.Input onTrackAdded( SampleConsumer onTrackAdded(
Format format, Format format,
@SupportedOutputTypes int supportedOutputTypes, @SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs, long streamStartPositionUs,
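For context, the FakeAssetLoader in TransformerEndToEndTest (further down in this commit) exercises exactly this call. A hedged sketch of how a custom AssetLoader might obtain and feed the returned consumer for an encoded track (format, listener, and the sample-filling step are assumed to exist in the surrounding loader code):

    try {
      SampleConsumer sampleConsumer =
          listener.onTrackAdded(
              format,
              AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED,
              /* streamStartPositionUs= */ 0,
              /* streamOffsetUs= */ 0);
      DecoderInputBuffer inputBuffer = sampleConsumer.dequeueInputBuffer();
      if (inputBuffer != null) {
        // Fill inputBuffer.data and inputBuffer.timeUs with the next encoded sample, then:
        sampleConsumer.queueInputBuffer();
      }
    } catch (TransformationException e) {
      // A real asset loader would report this error back through its listener.
    }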

View File

@@ -35,7 +35,7 @@ import java.util.List;
 import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
 import org.checkerframework.dataflow.qual.Pure;
 
-/** Pipeline to apply audio processing to raw audio samples, encode them and mux them. */
+/** Pipeline to process, re-encode and mux raw audio samples. */
 /* package */ final class AudioTranscodingSamplePipeline extends BaseSamplePipeline {
 
   private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;
@@ -137,11 +137,6 @@ import org.checkerframework.dataflow.qual.Pure;
     nextEncoderInputBufferTimeUs = streamOffsetUs;
   }
 
-  @Override
-  public boolean expectsDecodedData() {
-    return true;
-  }
-
   @Override
   @Nullable
   public DecoderInputBuffer dequeueInputBuffer() {

View File

@@ -50,6 +50,11 @@ import androidx.media3.decoder.DecoderInputBuffer;
         TransformationException.ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED);
   }
 
+  @Override
+  public boolean expectsDecodedData() {
+    return true;
+  }
+
   @Override
   public boolean processData() throws TransformationException {
     return feedMuxer() || processDataUpToMuxer();

View File

@@ -25,6 +25,7 @@ import static androidx.media3.transformer.AssetLoader.SUPPORTED_OUTPUT_TYPE_ENCODED;
 import android.media.MediaCodec;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
+import androidx.media3.common.ColorInfo;
 import androidx.media3.common.Format;
 import androidx.media3.common.MimeTypes;
 import androidx.media3.decoder.DecoderInputBuffer;
@@ -34,6 +35,8 @@ import androidx.media3.exoplayer.MediaClock;
 import androidx.media3.exoplayer.RendererCapabilities;
 import androidx.media3.exoplayer.source.SampleStream.ReadDataResult;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
 import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@@ -47,6 +50,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
   private final TransformerMediaClock mediaClock;
   private final AssetLoader.Listener assetLoaderListener;
   private final DecoderInputBuffer decoderInputBuffer;
+  private final List<Long> decodeOnlyPresentationTimestamps;
 
   private boolean isTransformationRunning;
   private long streamStartPositionUs;
@@ -54,7 +58,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
   private @MonotonicNonNull SefSlowMotionFlattener sefVideoSlowMotionFlattener;
   private @MonotonicNonNull Codec decoder;
   @Nullable private ByteBuffer pendingDecoderOutputBuffer;
-  private SamplePipeline.@MonotonicNonNull Input samplePipelineInput;
+  private int maxDecoderPendingFrameCount;
+  private @MonotonicNonNull SampleConsumer sampleConsumer;
   private boolean isEnded;
 
   public ExoPlayerAssetLoaderRenderer(
@@ -69,6 +74,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
     this.mediaClock = mediaClock;
     this.assetLoaderListener = assetLoaderListener;
     decoderInputBuffer = new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED);
+    decodeOnlyPresentationTimestamps = new ArrayList<>();
   }
 
   @Override
@@ -112,10 +118,16 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
         return;
       }
 
-      if (samplePipelineInput.expectsDecodedData()) {
-        while (feedPipelineFromDecoder() || feedDecoderFromInput()) {}
+      if (sampleConsumer.expectsDecodedData()) {
+        if (getTrackType() == C.TRACK_TYPE_AUDIO) {
+          while (feedConsumerAudioFromDecoder() || feedDecoderFromInput()) {}
+        } else if (getTrackType() == C.TRACK_TYPE_VIDEO) {
+          while (feedConsumerVideoFromDecoder() || feedDecoderFromInput()) {}
+        } else {
+          throw new IllegalStateException();
+        }
       } else {
-        while (feedPipelineFromInput()) {}
+        while (feedConsumerFromInput()) {}
       }
     } catch (TransformationException e) {
       isTransformationRunning = false;
@@ -151,9 +163,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
     }
   }
 
-  @EnsuresNonNullIf(expression = "samplePipelineInput", result = true)
+  @EnsuresNonNullIf(expression = "sampleConsumer", result = true)
   private boolean ensureConfigured() throws TransformationException {
-    if (samplePipelineInput != null) {
+    if (sampleConsumer != null) {
       return true;
     }
 
@@ -166,30 +178,42 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
     Format inputFormat = checkNotNull(formatHolder.format);
     @AssetLoader.SupportedOutputTypes
     int supportedOutputTypes = SUPPORTED_OUTPUT_TYPE_ENCODED | SUPPORTED_OUTPUT_TYPE_DECODED;
-    samplePipelineInput =
+    sampleConsumer =
         assetLoaderListener.onTrackAdded(
             inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
 
     if (getTrackType() == C.TRACK_TYPE_VIDEO && flattenForSlowMotion) {
       sefVideoSlowMotionFlattener = new SefSlowMotionFlattener(inputFormat);
     }
-    if (samplePipelineInput.expectsDecodedData()) {
-      decoder = decoderFactory.createForAudioDecoding(inputFormat);
+    if (sampleConsumer.expectsDecodedData()) {
+      if (getTrackType() == C.TRACK_TYPE_AUDIO) {
+        decoder = decoderFactory.createForAudioDecoding(inputFormat);
+      } else if (getTrackType() == C.TRACK_TYPE_VIDEO) {
+        boolean isDecoderToneMappingRequired =
+            ColorInfo.isTransferHdr(inputFormat.colorInfo)
+                && !ColorInfo.isTransferHdr(sampleConsumer.getExpectedColorInfo());
+        decoder =
+            decoderFactory.createForVideoDecoding(
+                inputFormat,
+                checkNotNull(sampleConsumer.getInputSurface()),
+                isDecoderToneMappingRequired);
+        maxDecoderPendingFrameCount = decoder.getMaxPendingFrameCount();
+      } else {
+        throw new IllegalStateException();
+      }
     }
     return true;
   }
 
   /**
-   * Attempts to read decoded data and pass it to the sample pipeline.
+   * Attempts to get decoded audio data and pass it to the sample consumer.
    *
    * @return Whether it may be possible to read more data immediately by calling this method again.
-   * @throws TransformationException If an error occurs in the decoder or in the {@link
-   *     SamplePipeline}.
+   * @throws TransformationException If an error occurs in the decoder.
    */
-  @RequiresNonNull("samplePipelineInput")
-  private boolean feedPipelineFromDecoder() throws TransformationException {
-    @Nullable
-    DecoderInputBuffer samplePipelineInputBuffer = samplePipelineInput.dequeueInputBuffer();
-    if (samplePipelineInputBuffer == null) {
+  @RequiresNonNull("sampleConsumer")
+  private boolean feedConsumerAudioFromDecoder() throws TransformationException {
+    @Nullable DecoderInputBuffer sampleConsumerInputBuffer = sampleConsumer.dequeueInputBuffer();
+    if (sampleConsumerInputBuffer == null) {
       return false;
     }
@@ -204,8 +228,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
     }
 
     if (decoder.isEnded()) {
-      samplePipelineInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
-      samplePipelineInput.queueInputBuffer();
+      sampleConsumerInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
+      sampleConsumer.queueInputBuffer();
       isEnded = true;
       return false;
     }
@@ -215,11 +239,46 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
       return false;
     }
 
-    samplePipelineInputBuffer.data = pendingDecoderOutputBuffer;
+    sampleConsumerInputBuffer.data = pendingDecoderOutputBuffer;
     MediaCodec.BufferInfo bufferInfo = checkNotNull(decoder.getOutputBufferInfo());
-    samplePipelineInputBuffer.timeUs = bufferInfo.presentationTimeUs;
-    samplePipelineInputBuffer.setFlags(bufferInfo.flags);
-    samplePipelineInput.queueInputBuffer();
+    sampleConsumerInputBuffer.timeUs = bufferInfo.presentationTimeUs;
+    sampleConsumerInputBuffer.setFlags(bufferInfo.flags);
+    sampleConsumer.queueInputBuffer();
+    return true;
+  }
+
+  /**
+   * Attempts to get decoded video data and pass it to the sample consumer.
+   *
+   * @return Whether it may be possible to read more data immediately by calling this method again.
+   * @throws TransformationException If an error occurs in the decoder.
+   */
+  @RequiresNonNull("sampleConsumer")
+  private boolean feedConsumerVideoFromDecoder() throws TransformationException {
+    Codec decoder = checkNotNull(this.decoder);
+    if (decoder.isEnded()) {
+      sampleConsumer.signalEndOfVideoInput();
+      isEnded = true;
+      return false;
+    }
+
+    @Nullable MediaCodec.BufferInfo decoderOutputBufferInfo = decoder.getOutputBufferInfo();
+    if (decoderOutputBufferInfo == null) {
+      return false;
+    }
+
+    if (isDecodeOnlyBuffer(decoderOutputBufferInfo.presentationTimeUs)) {
+      decoder.releaseOutputBuffer(/* render= */ false);
+      return true;
+    }
+
+    if (maxDecoderPendingFrameCount != C.UNLIMITED_PENDING_FRAME_COUNT
+        && sampleConsumer.getPendingVideoFrameCount() == maxDecoderPendingFrameCount) {
+      return false;
+    }
+
+    sampleConsumer.registerVideoFrame();
+    decoder.releaseOutputBuffer(/* render= */ true);
     return true;
   }
@@ -243,33 +302,35 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
       return true;
     }
 
+    if (decoderInputBuffer.isDecodeOnly()) {
+      decodeOnlyPresentationTimestamps.add(decoderInputBuffer.timeUs);
+    }
     decoder.queueInputBuffer(decoderInputBuffer);
     return true;
   }
 
   /**
-   * Attempts to read input data and pass it to the sample pipeline.
+   * Attempts to read input data and pass it to the sample consumer.
    *
    * @return Whether it may be possible to read more data immediately by calling this method again.
    */
-  @RequiresNonNull("samplePipelineInput")
-  private boolean feedPipelineFromInput() {
-    @Nullable
-    DecoderInputBuffer samplePipelineInputBuffer = samplePipelineInput.dequeueInputBuffer();
-    if (samplePipelineInputBuffer == null) {
+  @RequiresNonNull("sampleConsumer")
+  private boolean feedConsumerFromInput() {
+    @Nullable DecoderInputBuffer sampleConsumerInputBuffer = sampleConsumer.dequeueInputBuffer();
+    if (sampleConsumerInputBuffer == null) {
       return false;
     }
 
-    if (!readInput(samplePipelineInputBuffer)) {
+    if (!readInput(sampleConsumerInputBuffer)) {
       return false;
     }
 
-    if (shouldDropInputBuffer(samplePipelineInputBuffer)) {
+    if (shouldDropInputBuffer(sampleConsumerInputBuffer)) {
       return true;
     }
 
-    samplePipelineInput.queueInputBuffer();
-    if (samplePipelineInputBuffer.isEndOfStream()) {
+    sampleConsumer.queueInputBuffer();
+    if (sampleConsumerInputBuffer.isEndOfStream()) {
       isEnded = true;
       return false;
     }
@@ -300,8 +361,8 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
   }
 
   /**
-   * Preprocesses an {@linkplain DecoderInputBuffer input buffer} queued to the pipeline and returns
-   * whether it should be dropped.
+   * Preprocesses an encoded {@linkplain DecoderInputBuffer input buffer} and returns whether it
+   * should be dropped.
    *
    * <p>The input buffer is cleared if it should be dropped.
    */
@@ -323,4 +384,17 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
     }
     return shouldDropInputBuffer;
   }
+
+  private boolean isDecodeOnlyBuffer(long presentationTimeUs) {
+    // We avoid using decodeOnlyPresentationTimestamps.remove(presentationTimeUs) because it would
+    // box presentationTimeUs, creating a Long object that would need to be garbage collected.
+    int size = decodeOnlyPresentationTimestamps.size();
+    for (int i = 0; i < size; i++) {
+      if (decodeOnlyPresentationTimestamps.get(i) == presentationTimeUs) {
+        decodeOnlyPresentationTimestamps.remove(i);
+        return true;
+      }
+    }
+    return false;
+  }
 }

View File

@@ -0,0 +1,124 @@
/*
* Copyright 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.decoder.DecoderInputBuffer;
/** Consumer of encoded media samples, raw audio or raw video frames. */
@UnstableApi
public interface SampleConsumer {
/**
* Returns whether the consumer should be fed with decoded sample data. If false, encoded sample
* data should be fed.
*
* <p>Can be called on any thread.
*/
boolean expectsDecodedData();
// Methods to pass compressed input or raw audio input.
/**
* Returns a buffer if the consumer is ready to accept input, and {@code null} otherwise.
*
* <p>If the consumer is ready to accept input and this method is called multiple times before
* {@linkplain #queueInputBuffer() queuing} input, the same buffer instance is returned.
*
* <p>Should only be used for compressed data and raw audio data.
*/
@Nullable
default DecoderInputBuffer dequeueInputBuffer() {
throw new UnsupportedOperationException();
}
/**
* Informs the consumer that its input buffer contains new input.
*
* <p>Should be called after filling the input buffer from {@link #dequeueInputBuffer()} with new
* input.
*
* <p>Should only be used for compressed data and raw audio data.
*/
default void queueInputBuffer() {
throw new UnsupportedOperationException();
}
// Methods to pass raw video input.
/**
* Returns the input {@link Surface}, where the consumer reads input frames from.
*
* <p>Should only be used for raw video data.
*
* <p>Can be called on any thread.
*/
default Surface getInputSurface() {
throw new UnsupportedOperationException();
}
/**
* Returns the expected input {@link ColorInfo}.
*
* <p>Should only be used for raw video data.
*
* <p>Can be called on any thread.
*/
default ColorInfo getExpectedColorInfo() {
throw new UnsupportedOperationException();
}
/**
* Returns the number of input video frames pending in the consumer. Pending input frames are
* frames that have been {@linkplain #registerVideoFrame() registered} but not processed off the
* {@linkplain #getInputSurface() input surface} yet.
*
* <p>Should only be used for raw video data.
*
* <p>Can be called on any thread.
*/
default int getPendingVideoFrameCount() {
throw new UnsupportedOperationException();
}
/**
* Informs the consumer that a frame will be queued to the {@linkplain #getInputSurface() input
* surface}.
*
* <p>Must be called before rendering a frame to the input surface.
*
* <p>Should only be used for raw video data.
*
* <p>Can be called on any thread.
*/
default void registerVideoFrame() {
throw new UnsupportedOperationException();
}
/**
* Informs the consumer that no further input frames will be rendered.
*
* <p>Should only be used for raw video data.
*
* <p>Can be called on any thread.
*/
default void signalEndOfVideoInput() {
throw new UnsupportedOperationException();
}
}
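Since all of the video-oriented methods have throwing defaults, a consumer that only takes encoded samples or raw audio can implement just the buffer-based methods. A minimal hypothetical sketch (not part of this commit), assuming it lives alongside SampleConsumer in androidx.media3.transformer and imports DecoderInputBuffer:

    /** Illustrative consumer that discards every sample it is fed. */
    final class DroppingSampleConsumer implements SampleConsumer {

      private final DecoderInputBuffer inputBuffer =
          new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT);

      @Override
      public boolean expectsDecodedData() {
        // Encoded samples are fine; they are dropped either way.
        return false;
      }

      @Override
      public DecoderInputBuffer dequeueInputBuffer() {
        return inputBuffer; // Always ready to accept input.
      }

      @Override
      public void queueInputBuffer() {
        inputBuffer.clear(); // A real consumer would process the sample before clearing it.
      }
    }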

View File

@@ -16,49 +16,12 @@
 package androidx.media3.transformer;
 
-import androidx.annotation.Nullable;
-import androidx.media3.common.util.UnstableApi;
-import androidx.media3.decoder.DecoderInputBuffer;
-
 /**
  * Pipeline for processing media data.
  *
  * <p>This pipeline can be used to implement transformations of audio or video samples.
  */
-@UnstableApi
-public interface SamplePipeline {
-
-  /** Input of a {@link SamplePipeline}. */
-  interface Input {
-
-    /** See {@link SamplePipeline#expectsDecodedData()}. */
-    boolean expectsDecodedData();
-
-    /** See {@link SamplePipeline#dequeueInputBuffer()}. */
-    @Nullable
-    DecoderInputBuffer dequeueInputBuffer();
-
-    /** See {@link SamplePipeline#queueInputBuffer()}. */
-    void queueInputBuffer();
-  }
-
-  /**
-   * Returns whether the pipeline should be fed with decoded sample data. If false, encoded sample
-   * data should be queued.
-   */
-  boolean expectsDecodedData();
-
-  /** Returns a buffer if the pipeline is ready to accept input, and {@code null} otherwise. */
-  @Nullable
-  DecoderInputBuffer dequeueInputBuffer() throws TransformationException;
-
-  /**
-   * Informs the pipeline that its input buffer contains new input.
-   *
-   * <p>Should be called after filling the input buffer from {@link #dequeueInputBuffer()} with new
-   * input.
-   */
-  void queueInputBuffer() throws TransformationException;
+/* package */ interface SamplePipeline extends SampleConsumer {
 
   /**
    * Processes the input data and returns whether it may be possible to process more data by calling

View File

@@ -29,9 +29,11 @@ import android.os.HandlerThread;
 import android.os.Looper;
 import android.os.Message;
 import android.os.ParcelFileDescriptor;
+import android.view.Surface;
 import androidx.annotation.IntDef;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
+import androidx.media3.common.ColorInfo;
 import androidx.media3.common.DebugViewProvider;
 import androidx.media3.common.Effect;
 import androidx.media3.common.Format;
@@ -83,8 +85,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   // Internal messages.
   private static final int MSG_START = 0;
   private static final int MSG_REGISTER_SAMPLE_PIPELINE = 1;
-  private static final int MSG_DEQUEUE_INPUT = 2;
-  private static final int MSG_QUEUE_INPUT = 3;
+  private static final int MSG_DEQUEUE_BUFFER = 2;
+  private static final int MSG_QUEUE_BUFFER = 3;
   private static final int MSG_DRAIN_PIPELINES = 4;
   private static final int MSG_END = 5;
   private static final int MSG_UPDATE_PROGRESS = 6;
@@ -230,11 +232,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       case MSG_REGISTER_SAMPLE_PIPELINE:
         registerSamplePipelineInternal((SamplePipeline) msg.obj);
         break;
-      case MSG_DEQUEUE_INPUT:
-        dequeueInputInternal(/* samplePipelineIndex= */ msg.arg1);
+      case MSG_DEQUEUE_BUFFER:
+        dequeueBufferInternal(/* samplePipelineIndex= */ msg.arg1);
         break;
-      case MSG_QUEUE_INPUT:
-        samplePipelines.get(/* samplePipelineIndex= */ msg.arg1).queueInputBuffer();
+      case MSG_QUEUE_BUFFER:
+        samplePipelines.get(/* index= */ msg.arg1).queueInputBuffer();
         break;
       case MSG_DRAIN_PIPELINES:
         drainPipelinesInternal();
@@ -271,7 +273,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     }
   }
 
-  private void dequeueInputInternal(int samplePipelineIndex) throws TransformationException {
+  private void dequeueBufferInternal(int samplePipelineIndex) throws TransformationException {
     SamplePipeline samplePipeline = samplePipelines.get(samplePipelineIndex);
     // The sample pipeline is drained before dequeuing input to maximise the chances of having an
     // input buffer to dequeue.
@@ -418,7 +420,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   }
 
   @Override
-  public SamplePipeline.Input onTrackAdded(
+  public SampleConsumer onTrackAdded(
       Format format,
       @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
       long streamStartPositionUs,
@@ -434,7 +436,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     SamplePipeline samplePipeline =
         getSamplePipeline(format, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
     internalHandler.obtainMessage(MSG_REGISTER_SAMPLE_PIPELINE, samplePipeline).sendToTarget();
-
     int samplePipelineIndex = tracksAddedCount;
     tracksAddedCount++;
@@ -458,7 +459,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       tracksAddedCount++;
     }
 
-    return new SamplePipelineInput(samplePipelineIndex, samplePipeline.expectsDecodedData());
+    return new SampleConsumerImpl(samplePipelineIndex, samplePipeline);
   }
 
   // MuxerWrapper.Listener implementation.
@@ -523,7 +524,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
             transformationRequest,
             videoEffects,
             frameProcessorFactory,
-            decoderFactory,
             encoderFactory,
             muxerWrapper,
             /* errorConsumer= */ this::onTransformationError,
@@ -622,19 +622,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     return false;
   }
 
-  private class SamplePipelineInput implements SamplePipeline.Input {
+  private class SampleConsumerImpl implements SampleConsumer {
 
     private final int samplePipelineIndex;
-    private final boolean expectsDecodedData;
+    private final SamplePipeline samplePipeline;
 
-    public SamplePipelineInput(int samplePipelineIndex, boolean expectsDecodedData) {
+    public SampleConsumerImpl(int samplePipelineIndex, SamplePipeline samplePipeline) {
       this.samplePipelineIndex = samplePipelineIndex;
-      this.expectsDecodedData = expectsDecodedData;
+      this.samplePipeline = samplePipeline;
     }
 
     @Override
     public boolean expectsDecodedData() {
-      return expectsDecodedData;
+      return samplePipeline.expectsDecodedData();
     }
 
     @Nullable
@@ -649,7 +649,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       // start of the sample pipelines). Having 2 thread hops per sample (one for dequeuing and
       // one for queuing) makes transmuxing slower than it used to be.
       internalHandler
-          .obtainMessage(MSG_DEQUEUE_INPUT, samplePipelineIndex, /* unused */ 0)
+          .obtainMessage(MSG_DEQUEUE_BUFFER, samplePipelineIndex, /* unused */ 0)
          .sendToTarget();
       clock.onThreadBlocked();
       dequeueBufferConditionVariable.blockUninterruptible();
@@ -660,9 +660,34 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 
     @Override
     public void queueInputBuffer() {
       internalHandler
-          .obtainMessage(MSG_QUEUE_INPUT, samplePipelineIndex, /* unused */ 0)
+          .obtainMessage(MSG_QUEUE_BUFFER, samplePipelineIndex, /* unused */ 0)
           .sendToTarget();
     }
+
+    @Override
+    public Surface getInputSurface() {
+      return samplePipeline.getInputSurface();
+    }
+
+    @Override
+    public ColorInfo getExpectedColorInfo() {
+      return samplePipeline.getExpectedColorInfo();
+    }
+
+    @Override
+    public int getPendingVideoFrameCount() {
+      return samplePipeline.getPendingVideoFrameCount();
+    }
+
+    @Override
+    public void registerVideoFrame() {
+      samplePipeline.registerVideoFrame();
+    }
+
+    @Override
+    public void signalEndOfVideoInput() {
+      samplePipeline.signalEndOfVideoInput();
+    }
   }
 }
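The dequeueInputBuffer() override in SampleConsumerImpl hops onto the internal thread and blocks the caller until that thread has dequeued a buffer (see the MSG_DEQUEUE_BUFFER handling earlier in this file). The pattern is roughly the following sketch, written with plain Handler and ConditionVariable primitives rather than the exact Transformer plumbing; blockingDequeue(), pendingBuffer and dequeueFromPipeline() are hypothetical names:

    // internalHandler runs on the internal thread; this method is called from the asset loader thread.
    @Nullable
    private DecoderInputBuffer blockingDequeue() {
      dequeueConditionVariable.close();
      internalHandler.post(
          () -> {
            pendingBuffer = dequeueFromPipeline(); // Runs on the internal thread.
            dequeueConditionVariable.open(); // Wake up the blocked caller.
          });
      dequeueConditionVariable.block(); // Caller thread waits for the internal thread.
      return pendingBuffer;
    }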

View File

@@ -49,23 +49,15 @@ import androidx.media3.effect.ScaleToFitTransformation;
 import com.google.common.collect.ImmutableList;
 import com.google.common.util.concurrent.MoreExecutors;
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
 import java.util.List;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 import org.checkerframework.dataflow.qual.Pure;
 
-/**
- * Pipeline to decode video samples, apply transformations on the raw samples, and re-encode them.
- */
+/** Pipeline to process, re-encode and mux raw video frames. */
 /* package */ final class VideoTranscodingSamplePipeline extends BaseSamplePipeline {
 
-  private final int maxPendingFrameCount;
-
-  private final DecoderInputBuffer decoderInputBuffer;
-  private final Codec decoder;
-  private final ArrayList<Long> decodeOnlyPresentationTimestamps;
-
   private final FrameProcessor frameProcessor;
+  private final ColorInfo frameProcessorInputColor;
 
   private final EncoderWrapper encoderWrapper;
   private final DecoderInputBuffer encoderOutputBuffer;
@@ -84,7 +76,6 @@ import org.checkerframework.dataflow.qual.Pure;
       TransformationRequest transformationRequest,
       ImmutableList<Effect> effects,
       FrameProcessor.Factory frameProcessorFactory,
-      Codec.DecoderFactory decoderFactory,
       Codec.EncoderFactory encoderFactory,
       MuxerWrapper muxerWrapper,
       Consumer<TransformationException> errorConsumer,
@@ -131,11 +122,8 @@ import org.checkerframework.dataflow.qual.Pure;
     finalFramePresentationTimeUs = C.TIME_UNSET;
 
-    decoderInputBuffer =
-        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
     encoderOutputBuffer =
         new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
-    decodeOnlyPresentationTimestamps = new ArrayList<>();
 
     // The decoder rotates encoded frames for display by inputFormat.rotationDegrees.
     int decodedWidth =
@@ -169,7 +157,7 @@ import org.checkerframework.dataflow.qual.Pure;
     ColorInfo encoderInputColor = encoderWrapper.getSupportedInputColor();
     // If not tone mapping using OpenGL, the decoder will output the encoderInputColor,
     // possibly by tone mapping.
-    ColorInfo frameProcessorInputColor =
+    frameProcessorInputColor =
         isGlToneMapping ? checkNotNull(inputFormat.colorInfo) : encoderInputColor;
     // For consistency with the Android platform, OpenGL tone mapping outputs colors with
     // C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
@@ -236,57 +224,42 @@ import org.checkerframework.dataflow.qual.Pure;
     frameProcessor.setInputFrameInfo(
         new FrameInfo(
             decodedWidth, decodedHeight, inputFormat.pixelWidthHeightRatio, streamOffsetUs));
-
-    boolean isDecoderToneMappingRequired =
-        ColorInfo.isTransferHdr(inputFormat.colorInfo)
-            && !ColorInfo.isTransferHdr(frameProcessorInputColor);
-    decoder =
-        decoderFactory.createForVideoDecoding(
-            inputFormat, frameProcessor.getInputSurface(), isDecoderToneMappingRequired);
-    maxPendingFrameCount = decoder.getMaxPendingFrameCount();
   }
 
   @Override
-  public boolean expectsDecodedData() {
-    return false;
+  public Surface getInputSurface() {
+    return frameProcessor.getInputSurface();
   }
 
   @Override
-  @Nullable
-  public DecoderInputBuffer dequeueInputBuffer() throws TransformationException {
-    return decoder.maybeDequeueInputBuffer(decoderInputBuffer) ? decoderInputBuffer : null;
+  public ColorInfo getExpectedColorInfo() {
+    return frameProcessorInputColor;
   }
 
   @Override
-  public void queueInputBuffer() throws TransformationException {
-    if (decoderInputBuffer.isDecodeOnly()) {
-      decodeOnlyPresentationTimestamps.add(decoderInputBuffer.timeUs);
-    }
-    decoder.queueInputBuffer(decoderInputBuffer);
+  public void registerVideoFrame() {
+    frameProcessor.registerInputFrame();
+  }
+
+  @Override
+  public int getPendingVideoFrameCount() {
+    return frameProcessor.getPendingInputFrameCount();
+  }
+
+  @Override
+  public void signalEndOfVideoInput() {
+    frameProcessor.signalEndOfInput();
   }
 
   @Override
   public void release() {
     frameProcessor.release();
-    decoder.release();
     encoderWrapper.release();
   }
 
   @Override
-  protected boolean processDataUpToMuxer() throws TransformationException {
-    if (decoder.isEnded()) {
-      return false;
-    }
-
-    boolean processedData = false;
-    while (maybeProcessDecoderOutput()) {
-      processedData = true;
-    }
-    if (decoder.isEnded()) {
-      frameProcessor.signalEndOfInput();
-    }
-    // If the decoder produced output, signal that it may be possible to process data again.
-    return processedData;
+  protected boolean processDataUpToMuxer() {
+    return false;
   }
 
   @Override
@@ -377,46 +350,6 @@ import org.checkerframework.dataflow.qual.Pure;
             || Build.ID.startsWith(/* Pixel Watch */ "rwd9.220429.053"));
   }
 
-  /**
-   * Feeds at most one decoder output frame to the next step of the pipeline.
-   *
-   * @return Whether a frame was processed.
-   * @throws TransformationException If a problem occurs while processing the frame.
-   */
-  private boolean maybeProcessDecoderOutput() throws TransformationException {
-    @Nullable MediaCodec.BufferInfo decoderOutputBufferInfo = decoder.getOutputBufferInfo();
-    if (decoderOutputBufferInfo == null) {
-      return false;
-    }
-
-    if (isDecodeOnlyBuffer(decoderOutputBufferInfo.presentationTimeUs)) {
-      decoder.releaseOutputBuffer(/* render= */ false);
-      return true;
-    }
-
-    if (maxPendingFrameCount != C.UNLIMITED_PENDING_FRAME_COUNT
-        && frameProcessor.getPendingInputFrameCount() == maxPendingFrameCount) {
-      return false;
-    }
-
-    frameProcessor.registerInputFrame();
-    decoder.releaseOutputBuffer(/* render= */ true);
-    return true;
-  }
-
-  private boolean isDecodeOnlyBuffer(long presentationTimeUs) {
-    // We avoid using decodeOnlyPresentationTimestamps.remove(presentationTimeUs) because it would
-    // box presentationTimeUs, creating a Long object that would need to be garbage collected.
-    int size = decodeOnlyPresentationTimestamps.size();
-    for (int i = 0; i < size; i++) {
-      if (decodeOnlyPresentationTimestamps.get(i) == presentationTimeUs) {
-        decodeOnlyPresentationTimestamps.remove(i);
-        return true;
-      }
-    }
-    return false;
-  }
-
   /**
    * Wraps an {@linkplain Codec encoder} and provides its input {@link Surface}.
    *

View File

@@ -68,7 +68,7 @@ public class ExoPlayerAssetLoaderTest {
         }
 
         @Override
-        public SamplePipeline.Input onTrackAdded(
+        public SampleConsumer onTrackAdded(
             Format format,
             @AssetLoader.SupportedOutputTypes int supportedOutputTypes,
             long streamStartPositionUs,
@@ -81,7 +81,7 @@ public class ExoPlayerAssetLoaderTest {
                 new IllegalStateException("onTrackAdded() called before onTrackCount()"));
           }
           isTrackAdded.set(true);
-          return new FakeSamplePipelineInput();
+          return new FakeSampleConsumer();
         }
 
         @Override
@@ -130,7 +130,7 @@ public class ExoPlayerAssetLoaderTest {
         .createAssetLoader();
   }
 
-  private static final class FakeSamplePipelineInput implements SamplePipeline.Input {
+  private static final class FakeSampleConsumer implements SampleConsumer {
 
     @Override
    public boolean expectsDecodedData() {

View File

@@ -633,18 +633,18 @@ public final class TransformerEndToEndTest {
   @Test
   public void startTransformation_withAssetLoaderAlwaysDecoding_pipelineExpectsDecoded()
       throws Exception {
-    AtomicReference<SamplePipeline.Input> samplePipelineInputRef = new AtomicReference<>();
+    AtomicReference<SampleConsumer> sampleConsumerRef = new AtomicReference<>();
     Transformer transformer =
         createTransformerBuilder(/* enableFallback= */ false)
             .setAssetLoaderFactory(
-                new FakeAssetLoader.Factory(SUPPORTED_OUTPUT_TYPE_DECODED, samplePipelineInputRef))
+                new FakeAssetLoader.Factory(SUPPORTED_OUTPUT_TYPE_DECODED, sampleConsumerRef))
             .build();
     MediaItem mediaItem = MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_VIDEO);
 
     transformer.startTransformation(mediaItem, outputPath);
-    runLooperUntil(transformer.getApplicationLooper(), () -> samplePipelineInputRef.get() != null);
+    runLooperUntil(transformer.getApplicationLooper(), () -> sampleConsumerRef.get() != null);
 
-    assertThat(samplePipelineInputRef.get().expectsDecodedData()).isTrue();
+    assertThat(sampleConsumerRef.get().expectsDecodedData()).isTrue();
   }
 
   @Test
@@ -654,7 +654,7 @@ public final class TransformerEndToEndTest {
             .setAudioProcessors(ImmutableList.of(new SonicAudioProcessor()))
             .setAssetLoaderFactory(
                 new FakeAssetLoader.Factory(
-                    SUPPORTED_OUTPUT_TYPE_ENCODED, /* samplePipelineInputRef= */ null))
+                    SUPPORTED_OUTPUT_TYPE_ENCODED, /* sampleConsumerRef= */ null))
             .build();
     MediaItem mediaItem = MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_VIDEO);
@@ -1077,15 +1077,15 @@ public final class TransformerEndToEndTest {
     public static final class Factory implements AssetLoader.Factory {
 
       private final @SupportedOutputTypes int supportedOutputTypes;
-      @Nullable private final AtomicReference<SamplePipeline.Input> samplePipelineInputRef;
+      @Nullable private final AtomicReference<SampleConsumer> sampleConsumerRef;
       @Nullable private AssetLoader.Listener listener;
 
       public Factory(
          @SupportedOutputTypes int supportedOutputTypes,
-          @Nullable AtomicReference<SamplePipeline.Input> samplePipelineInputRef) {
+          @Nullable AtomicReference<SampleConsumer> sampleConsumerRef) {
        this.supportedOutputTypes = supportedOutputTypes;
-        this.samplePipelineInputRef = samplePipelineInputRef;
+        this.sampleConsumerRef = sampleConsumerRef;
      }
 
      @Override
@@ -1136,22 +1136,21 @@ public final class TransformerEndToEndTest {
       @Override
       public AssetLoader createAssetLoader() {
-        return new FakeAssetLoader(
-            checkNotNull(listener), supportedOutputTypes, samplePipelineInputRef);
+        return new FakeAssetLoader(checkNotNull(listener), supportedOutputTypes, sampleConsumerRef);
       }
     }
 
     private final AssetLoader.Listener listener;
     private final @SupportedOutputTypes int supportedOutputTypes;
-    @Nullable private final AtomicReference<SamplePipeline.Input> samplePipelineInputRef;
+    @Nullable private final AtomicReference<SampleConsumer> sampleConsumerRef;
 
     public FakeAssetLoader(
         Listener listener,
         @SupportedOutputTypes int supportedOutputTypes,
-        @Nullable AtomicReference<SamplePipeline.Input> samplePipelineInputRef) {
+        @Nullable AtomicReference<SampleConsumer> sampleConsumerRef) {
       this.listener = listener;
       this.supportedOutputTypes = supportedOutputTypes;
-      this.samplePipelineInputRef = samplePipelineInputRef;
+      this.sampleConsumerRef = sampleConsumerRef;
     }
 
     @Override
@@ -1165,14 +1164,14 @@ public final class TransformerEndToEndTest {
               .setChannelCount(2)
               .build();
       try {
-        SamplePipeline.Input samplePipelineInput =
+        SampleConsumer sampleConsumer =
             listener.onTrackAdded(
                 format,
                 supportedOutputTypes,
                 /* streamStartPositionUs= */ 0,
                 /* streamOffsetUs= */ 0);
-        if (samplePipelineInputRef != null) {
-          samplePipelineInputRef.set(samplePipelineInput);
+        if (sampleConsumerRef != null) {
+          sampleConsumerRef.set(sampleConsumer);
         }
       } catch (TransformationException e) {
         throw new IllegalStateException(e);