Do not force EOS when decoder has produced all frames

The workaround in ExternalTextureManager.forceSignalEndOfStream
was being applied even when the decoder had produced all frames and only
the GL pipeline was slow.

This change ensures that workaround is not applied when the decoder
has already produced all output frames.

PiperOrigin-RevId: 680471587
This commit is contained in:
dancho 2024-09-30 01:45:59 -07:00 committed by Copybara-Service
parent b0b54ca018
commit 7b08bedf2c
3 changed files with 94 additions and 21 deletions

View File

@@ -72,9 +72,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* stream is considered to have ended, even if not all expected frames have been received from the
* decoder. This has been observed on some decoders.
*
* <p>Some emulator decoders are slower, hence using a longer timeout. Also on some emulators, GL
* operation takes a long time to finish, the timeout could be a result of slow GL operation back
* pressured the decoder, and the decoder is not able to decode another frame.
* <p>Some emulator decoders are slower, hence using a longer timeout.
*/
// LINT.IfChange(SURFACE_TEXTURE_TIMEOUT_MS)
private static final long SURFACE_TEXTURE_TIMEOUT_MS = isRunningOnEmulator() ? 20_000 : 500;
@@ -353,6 +351,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private void forceSignalEndOfStream() {
if (availableFrameCount == pendingFrames.size()) {
// All frames received from decoder. Do not force end of stream.
return;
}
Log.w(
TAG,
Util.formatInvariant(

View File

@@ -968,6 +968,35 @@ public final class AndroidTestUtil {
public static final AssetInfo WAV_ASSET =
new AssetInfo.Builder("asset:///media/wav/sample.wav").build();
/** A {@link GlEffect} that adds delay in the video pipeline by putting the thread to sleep. */
public static final class DelayEffect implements GlEffect {

  // How long to block the video pipeline thread for each queued frame.
  private final long delayMs;

  public DelayEffect(long delayMs) {
    this.delayMs = delayMs;
  }

  @Override
  public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr) {
    return new PassthroughShaderProgram() {
      @Override
      public void queueInputFrame(
          GlObjectsProvider glObjectsProvider,
          GlTextureInfo inputTexture,
          long presentationTimeUs) {
        // Sleeping here back-pressures the GL pipeline; each input frame is held for delayMs
        // before being passed through unchanged.
        boolean interrupted = false;
        try {
          Thread.sleep(delayMs);
        } catch (InterruptedException e) {
          // Restore the interrupt flag and report the error instead of forwarding the frame.
          Thread.currentThread().interrupt();
          onError(e);
          interrupted = true;
        }
        if (!interrupted) {
          super.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs);
        }
      }
    };
  }
}
/**
* Creates the GL objects needed to set up a GL environment including an {@link EGLDisplay} and an
* {@link EGLContext}.

View File

@@ -18,25 +18,32 @@ package androidx.media3.transformer;
import static androidx.media3.transformer.AndroidTestUtil.FORCE_TRANSCODE_VIDEO_EFFECTS;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S;
import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeTrue;
import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.Util;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.extractor.mp4.Mp4Extractor;
import androidx.media3.extractor.text.DefaultSubtitleParserFactory;
import androidx.media3.test.utils.FakeExtractorOutput;
import androidx.media3.test.utils.TestUtil;
import androidx.media3.transformer.AndroidTestUtil.DelayEffect;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.json.JSONException;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -65,9 +72,9 @@ public class ForceEndOfStreamTest {
@Test
public void transcode_decoderDroppingLastFourFrames_exportSucceeds() throws Exception {
if (skipTestBelowApi29(context, testId)) {
return;
}
// TODO: b/370050055 - do we need API 29+, or the device list from
// Util.isFrameDropAllowedOnSurfaceInput?
assumeTrue(Util.SDK_INT >= 29);
assumeFormatsSupported(
context,
testId,
@@ -80,7 +87,6 @@ public class ForceEndOfStreamTest {
.build()
.run(testId, createComposition(MediaItem.fromUri(MP4_ASSET.uri)));
assertThat(testResult.analysisException).isNull();
assertThat(testResult.exportResult.videoFrameCount)
.isEqualTo(MP4_ASSET.videoFrameCount - framesToSkip);
assertThat(new File(testResult.filePath).length()).isGreaterThan(0);
@@ -88,9 +94,9 @@ public class ForceEndOfStreamTest {
@Test
public void transcode_decoderDroppingNoFrame_exportSucceeds() throws Exception {
if (skipTestBelowApi29(context, testId)) {
return;
}
// TODO: b/370050055 - do we need API 29+, or the device list from
// Util.isFrameDropAllowedOnSurfaceInput?
assumeTrue(Util.SDK_INT >= 29);
assumeFormatsSupported(
context,
testId,
@@ -103,19 +109,55 @@ public class ForceEndOfStreamTest {
.build()
.run(testId, createComposition(MediaItem.fromUri(MP4_ASSET.uri)));
assertThat(testResult.analysisException).isNull();
assertThat(testResult.exportResult.videoFrameCount).isEqualTo(MP4_ASSET.videoFrameCount);
assertThat(new File(testResult.filePath).length()).isGreaterThan(0);
}
private static boolean skipTestBelowApi29(Context context, String testId)
throws JSONException, IOException {
if (Util.SDK_INT < 29) {
AndroidTestUtil.recordTestSkipped(
context, testId, /* reason= */ "Decoder frame dropping is possible from API29.");
return true;
}
return false;
@Test
public void transcode_withSlowVideoEffect_exportSucceedsWithCorrectNumberOfFrames()
    throws Exception {
  // TODO: b/370050055 - do we need API 29+, or the device list from
  // Util.isFrameDropAllowedOnSurfaceInput?
  assumeTrue(Util.SDK_INT >= 29);
  assumeFormatsSupported(
      context,
      testId,
      /* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat,
      /* outputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat);
  // MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S is used because it's widely supported.
  // Clip to 30 frames: the 200ms DelayEffect is applied per frame, so processing more frames
  // would make this test unnecessarily slow.
  MediaItem.ClippingConfiguration clipToFirst30Frames =
      new MediaItem.ClippingConfiguration.Builder().setEndPositionMs(495).build();
  MediaItem clippedMediaItem =
      new MediaItem.Builder()
          .setUri(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.uri)
          .setClippingConfiguration(clipToFirst30Frames)
          .build();
  // Video-only item with a slow (sleeping) GL effect to back-pressure the decoder.
  EditedMediaItem slowEffectItem =
      new EditedMediaItem.Builder(clippedMediaItem)
          .setRemoveAudio(true)
          .setEffects(
              new Effects(
                  /* audioProcessors= */ ImmutableList.of(),
                  /* videoEffects= */ ImmutableList.of(new DelayEffect(/* delayMs= */ 200))))
          .build();
  Composition composition =
      new Composition.Builder(
              new EditedMediaItemSequence.Builder().addItem(slowEffectItem).build())
          .build();

  ExportTestResult result =
      new TransformerAndroidTestRunner.Builder(context, new Transformer.Builder(context).build())
          .build()
          .run(testId, composition);

  // All 30 clipped frames must reach the output even though GL was the bottleneck.
  FakeExtractorOutput extractorOutput =
      TestUtil.extractAllSamplesFromFilePath(
          new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
  extractorOutput.track(0, C.TRACK_TYPE_VIDEO).assertSampleCount(30);
}
private static Transformer buildTransformer(Context context, int framesToSkip) {