Add a tracing utility for debugging video-related muxer timeouts

PiperOrigin-RevId: 529632886
Authored by claincly on 2023-05-05 07:22:14 +00:00; committed by Tofunmi Adigun-Hameed
parent 213740c456
commit 83b5f824f8
9 changed files with 229 additions and 5 deletions

View File

@@ -0,0 +1,197 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import androidx.annotation.GuardedBy;
import androidx.media3.common.C;
import androidx.media3.common.util.SystemClock;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import com.google.common.base.Joiner;
import java.util.ArrayDeque;
import java.util.Queue;
/** A utility for tracing frame and end-of-stream events through the export pipeline, to help debug muxer timeouts. */
@UnstableApi
public final class DebugTraceUtil {
private static final int MUXER_CAN_WRITE_SAMPLE_RECORD_COUNT = 10;
/** The timestamps at which the decoder received end of stream signal, in milliseconds. */
@GuardedBy("DebugTraceUtil.class")
private static final Queue<Long> DECODER_RECEIVE_EOS_TIMES_MS = new ArrayDeque<>();
/** The timestamps at which the decoder signalled end of stream, in milliseconds. */
@GuardedBy("DebugTraceUtil.class")
private static final Queue<Long> DECODER_SIGNAL_EOS_TIMES_MS = new ArrayDeque<>();
/**
* The timestamps at which {@code VideoFrameProcessor} received end of stream signal from the
* decoder, in milliseconds.
*/
@GuardedBy("DebugTraceUtil.class")
private static final Queue<Long> VIDEO_FRAME_PROCESSOR_RECEIVE_DECODER_EOS_TIMES_MS =
new ArrayDeque<>();
/**
* The timestamps at which {@code VideoFrameProcessor} signalled end of stream, in milliseconds.
*/
@GuardedBy("DebugTraceUtil.class")
private static final Queue<Long> VIDEO_FRAME_PROCESSOR_SIGNAL_EOS_TIMES_MS = new ArrayDeque<>();
/** The timestamps at which the encoder received end of stream signal, in milliseconds. */
@GuardedBy("DebugTraceUtil.class")
private static final Queue<Long> ENCODER_RECEIVE_EOS_TIMES_MS = new ArrayDeque<>();
/**
* The last {@link #MUXER_CAN_WRITE_SAMPLE_RECORD_COUNT} values returned by {@code
* MuxerWrapper#canWriteSample}.
*/
@GuardedBy("DebugTraceUtil.class")
private static final Queue<Boolean> MUXER_CAN_WRITE_VIDEO_SAMPLE = new ArrayDeque<>();
/** The timestamps at which the muxer video track is stopped, in milliseconds. */
@GuardedBy("DebugTraceUtil.class")
private static final Queue<Long> MUXER_TRACK_END_TIMES_MS = new ArrayDeque<>();
/** The number of decoded frames. */
@GuardedBy("DebugTraceUtil.class")
private static int numberOfDecodedFrames = 0;
/** The number of frames made available on {@code VideoFrameProcessor}'s input surface. */
@GuardedBy("DebugTraceUtil.class")
private static int numberOfFramesRenderedToVideoFrameProcessorInput = 0;
/** The number of frames rendered to {@code VideoFrameProcessor}'s output. */
@GuardedBy("DebugTraceUtil.class")
private static int numberOfFramesRenderedToVideoFrameProcessorOutput = 0;
/** The number of encoded frames. */
@GuardedBy("DebugTraceUtil.class")
private static int numberOfEncodedFrames = 0;
/** The number of video frames written to the muxer. */
@GuardedBy("DebugTraceUtil.class")
private static int numberOfMuxedFrames = 0;
private DebugTraceUtil() {}
public static synchronized void reset() {
numberOfDecodedFrames = 0;
numberOfFramesRenderedToVideoFrameProcessorInput = 0;
numberOfFramesRenderedToVideoFrameProcessorOutput = 0;
numberOfEncodedFrames = 0;
numberOfMuxedFrames = 0;
DECODER_RECEIVE_EOS_TIMES_MS.clear();
DECODER_SIGNAL_EOS_TIMES_MS.clear();
VIDEO_FRAME_PROCESSOR_RECEIVE_DECODER_EOS_TIMES_MS.clear();
VIDEO_FRAME_PROCESSOR_SIGNAL_EOS_TIMES_MS.clear();
ENCODER_RECEIVE_EOS_TIMES_MS.clear();
MUXER_CAN_WRITE_VIDEO_SAMPLE.clear();
MUXER_TRACK_END_TIMES_MS.clear();
}
public static synchronized void recordDecodedFrame() {
numberOfDecodedFrames++;
}
public static synchronized void recordFrameRenderedToVideoFrameProcessorInput() {
numberOfFramesRenderedToVideoFrameProcessorInput++;
}
public static synchronized void recordFrameRenderedToVideoFrameProcessorOutput() {
numberOfFramesRenderedToVideoFrameProcessorOutput++;
}
public static synchronized void recordEncodedFrame() {
numberOfEncodedFrames++;
}
public static synchronized void recordMuxerInput(@C.TrackType int trackType) {
if (trackType == C.TRACK_TYPE_VIDEO) {
numberOfMuxedFrames++;
}
}
public static synchronized void recordDecoderReceiveEos() {
DECODER_RECEIVE_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
}
public static synchronized void recordDecoderSignalEos() {
DECODER_SIGNAL_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
}
public static synchronized void recordVideoFrameProcessorReceiveDecoderEos() {
VIDEO_FRAME_PROCESSOR_RECEIVE_DECODER_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
}
public static synchronized void recordVideoFrameProcessorSignalEos() {
VIDEO_FRAME_PROCESSOR_SIGNAL_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
}
public static synchronized void recordEncoderReceiveEos() {
ENCODER_RECEIVE_EOS_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
}
public static synchronized void recordMuxerCanAddSample(
@C.TrackType int trackType, boolean canAddSample) {
if (trackType == C.TRACK_TYPE_VIDEO) {
if (MUXER_CAN_WRITE_VIDEO_SAMPLE.size() == MUXER_CAN_WRITE_SAMPLE_RECORD_COUNT) {
MUXER_CAN_WRITE_VIDEO_SAMPLE.poll();
}
MUXER_CAN_WRITE_VIDEO_SAMPLE.add(canAddSample);
}
}
public static synchronized void recordMuxerTrackEnded(@C.TrackType int trackType) {
if (trackType == C.TRACK_TYPE_VIDEO) {
MUXER_TRACK_END_TIMES_MS.add(SystemClock.DEFAULT.elapsedRealtime());
}
}
public static synchronized String generateTrace() {
return "Decoded: "
+ numberOfDecodedFrames
+ ", Rendered to VFP: "
+ numberOfFramesRenderedToVideoFrameProcessorInput
+ ", Rendered to encoder: "
+ numberOfFramesRenderedToVideoFrameProcessorOutput
+ ", Encoded: "
+ numberOfEncodedFrames
+ ", Muxed: "
+ numberOfMuxedFrames
+ ", Decoder receive EOS: "
+ generateString(DECODER_RECEIVE_EOS_TIMES_MS)
+ ", Decoder signal EOS: "
+ generateString(DECODER_SIGNAL_EOS_TIMES_MS)
+ ", VFP receive EOS: "
+ generateString(VIDEO_FRAME_PROCESSOR_RECEIVE_DECODER_EOS_TIMES_MS)
+ ", VFP signal EOS: "
+ generateString(VIDEO_FRAME_PROCESSOR_SIGNAL_EOS_TIMES_MS)
+ ", Encoder receive EOS: "
+ generateString(ENCODER_RECEIVE_EOS_TIMES_MS)
+ Util.formatInvariant(
", Muxer last %d video canWriteSample: ", MUXER_CAN_WRITE_SAMPLE_RECORD_COUNT)
+ generateString(MUXER_CAN_WRITE_VIDEO_SAMPLE)
+ ", Muxer stopped: "
+ generateString(MUXER_TRACK_END_TIMES_MS);
}
private static <T> String generateString(Queue<T> queue) {
return queue.isEmpty() ? "NO" : Util.formatInvariant("[%s]", Joiner.on(',').join(queue));
}
}
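For orientation, here is a minimal usage sketch of the utility above. The call sequence and literal values are hypothetical; the real call sites are the hooks added in the files that follow.

// Illustrative only -- mirrors the hooks added in the diffs below.
DebugTraceUtil.reset();                               // once per export, before transcoding starts
DebugTraceUtil.recordDecodedFrame();                  // the decoder produced a frame
DebugTraceUtil.recordMuxerCanAddSample(C.TRACK_TYPE_VIDEO, /* canAddSample= */ true);
DebugTraceUtil.recordMuxerInput(C.TRACK_TYPE_VIDEO);  // a video sample was written to the muxer
String trace = DebugTraceUtil.generateTrace();
// trace is a single line of counters and timestamps, e.g.
// "Decoded: 1, Rendered to VFP: 0, Rendered to encoder: 0, Encoded: 0, Muxed: 1,
//  Decoder receive EOS: NO, ..., Muxer last 10 video canWriteSample: [true], Muxer stopped: NO"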

View File

@@ -419,6 +419,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
  @Override
  public void signalEndOfInput() {
+   DebugTraceUtil.recordVideoFrameProcessorReceiveDecoderEos();
    checkState(!inputStreamEnded);
    inputStreamEnded = true;
    boolean allInputStreamsProcessed;

View File

@@ -97,6 +97,7 @@ import java.util.concurrent.atomic.AtomicInteger;
        unused ->
            videoFrameProcessingTaskExecutor.submit(
                () -> {
+                 DebugTraceUtil.recordFrameRenderedToVideoFrameProcessorInput();
                  if (numberOfFramesToDropOnBecomingAvailable > 0) {
                    numberOfFramesToDropOnBecomingAvailable--;
                    surfaceTexture.updateTexImage();

View File

@@ -181,6 +181,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    boolean frameProcessingEnded =
        checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();
    if (frameProcessingEnded) {
+     DebugTraceUtil.recordVideoFrameProcessorSignalEos();
      videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
    }
  }
@@ -325,6 +326,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
            ? System.nanoTime()
            : renderTimeNs);
    EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
+   DebugTraceUtil.recordFrameRenderedToVideoFrameProcessorOutput();
  }

  private void renderFrameToOutputTexture(GlTextureInfo inputTexture, long presentationTimeUs)

View File

@@ -41,6 +41,7 @@ import androidx.media3.common.util.TraceUtil;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.decoder.DecoderInputBuffer;
+import androidx.media3.effect.DebugTraceUtil;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.checkerframework.checker.initialization.qual.UnknownInitialization;
@@ -214,6 +215,9 @@ public final class DefaultCodec implements Codec {
    if (inputBuffer.isEndOfStream()) {
      inputStreamEnded = true;
      flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+     if (isVideo && isDecoder) {
+       DebugTraceUtil.recordDecoderReceiveEos();
+     }
    }
    try {
      mediaCodec.queueInputBuffer(inputBufferIndex, offset, size, inputBuffer.timeUs, flags);
@@ -227,6 +231,7 @@ public final class DefaultCodec implements Codec {
  @Override
  public void signalEndOfInputStream() throws ExportException {
+   DebugTraceUtil.recordEncoderReceiveEos();
    try {
      mediaCodec.signalEndOfInputStream();
    } catch (RuntimeException e) {

View File

@@ -24,6 +24,7 @@ import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.decoder.DecoderInputBuffer;
+import androidx.media3.effect.DebugTraceUtil;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
@@ -127,6 +128,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
  @RequiresNonNull({"sampleConsumer", "decoder"})
  protected boolean feedConsumerFromDecoder() throws ExportException {
    if (decoder.isEnded()) {
+     DebugTraceUtil.recordDecoderSignalEos();
      sampleConsumer.signalEndOfVideoInput();
      isEnded = true;
      return false;
@@ -154,6 +156,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
    }
    decoder.releaseOutputBuffer(presentationTimeUs);
+   DebugTraceUtil.recordDecodedFrame();
    return true;
  }

View File

@@ -22,6 +22,7 @@ import static androidx.media3.common.util.Assertions.checkState;
import static java.lang.Math.max;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
+import android.util.Log;
import android.util.SparseArray;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
@@ -30,6 +31,7 @@ import androidx.media3.common.Format;
import androidx.media3.common.Metadata;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Util;
+import androidx.media3.effect.DebugTraceUtil;
import com.google.common.collect.ImmutableList;
import java.io.File;
import java.nio.ByteBuffer;
@@ -47,6 +49,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
 */
/* package */ final class MuxerWrapper {
+ private static final String MUXER_TIMEOUT_ERROR_FORMAT_STRING =
+     "Abort: no output sample written in the last %d milliseconds. DebugTrace: %s";
  public interface Listener {
    void onTrackEnded(
        @C.TrackType int trackType, Format format, int averageBitrate, int sampleCount);
@@ -191,8 +196,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
    // SparseArray.get() returns null by default if the value is not found.
    checkArgument(
        trackInfo != null, "Could not write sample because there is no track of type " + trackType);
-   if (!canWriteSample(trackType, presentationTimeUs)) {
+   boolean canWriteSample = canWriteSample(trackType, presentationTimeUs);
+   DebugTraceUtil.recordMuxerCanAddSample(trackType, canWriteSample);
+   if (!canWriteSample) {
      return false;
    }
@@ -204,6 +210,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
    resetAbortTimer();
    muxer.writeSampleData(
        trackInfo.index, data, presentationTimeUs, isKeyFrame ? C.BUFFER_FLAG_KEY_FRAME : 0);
+   DebugTraceUtil.recordMuxerInput(trackType);
    previousTrackType = trackType;
    return true;
  }
@@ -225,12 +232,15 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
    listener.onTrackEnded(
        trackType, trackInfo.format, trackInfo.getAverageBitrate(), trackInfo.sampleCount);
+   DebugTraceUtil.recordMuxerTrackEnded(trackType);
    trackTypeToInfo.delete(trackType);
    if (trackTypeToInfo.size() == 0) {
      abortScheduledExecutorService.shutdownNow();
      if (!isEnded) {
        isEnded = true;
        listener.onEnded(Util.usToMs(maxEndedTrackTimeUs), getCurrentOutputSizeBytes());
+       Log.e("LYC", DebugTraceUtil.generateTrace());
      }
    }
  }
@@ -298,9 +308,10 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
            listener.onError(
                ExportException.createForMuxer(
                    new IllegalStateException(
-                       "No output sample written in the last "
-                           + maxDelayBetweenSamplesMs
-                           + " milliseconds. Aborting transformation."),
+                       Util.formatInvariant(
+                           MUXER_TIMEOUT_ERROR_FORMAT_STRING,
+                           maxDelayBetweenSamplesMs,
+                           DebugTraceUtil.generateTrace())),
                    ExportException.ERROR_CODE_MUXING_TIMEOUT));
          },
          maxDelayBetweenSamplesMs,
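With this change, a muxer timeout surfaces the full trace in the ExportException. A hypothetical example of the resulting message (the delay and counter values below are invented for illustration; the trailing part is whatever DebugTraceUtil.generateTrace() returns at the moment of the timeout):

// Assuming maxDelayBetweenSamplesMs = 10000:
// "Abort: no output sample written in the last 10000 milliseconds. DebugTrace: Decoded: 30,
//  Rendered to VFP: 30, Rendered to encoder: 30, Encoded: 29, Muxed: 0, Decoder receive EOS: NO,
//  Decoder signal EOS: NO, VFP receive EOS: NO, VFP signal EOS: NO, Encoder receive EOS: NO,
//  Muxer last 10 video canWriteSample: [false,false,false], Muxer stopped: NO"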

View File

@@ -38,6 +38,7 @@ import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.common.util.ListenerSet;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
+import androidx.media3.effect.DebugTraceUtil;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.Presentation;
import androidx.media3.exoplayer.source.DefaultMediaSourceFactory;
@@ -712,6 +713,7 @@ public final class Transformer {
    HandlerWrapper applicationHandler = clock.createHandler(looper, /* callback= */ null);
    FallbackListener fallbackListener =
        new FallbackListener(composition, listeners, applicationHandler, transformationRequest);
+   DebugTraceUtil.reset();
    transformerInternal =
        new TransformerInternal(
            context,

View File

@@ -49,6 +49,7 @@ import androidx.media3.common.util.Log;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.Util;
import androidx.media3.decoder.DecoderInputBuffer;
+import androidx.media3.effect.DebugTraceUtil;
import androidx.media3.effect.Presentation;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
@@ -289,6 +290,7 @@ import org.checkerframework.dataflow.qual.Pure;
        bufferInfo.presentationTimeUs = finalFramePresentationTimeUs;
      }
    }
+   DebugTraceUtil.recordEncodedFrame();
    encoderOutputBuffer.timeUs = bufferInfo.presentationTimeUs;
    encoderOutputBuffer.setFlags(bufferInfo.flags);
    return encoderOutputBuffer;