Revise seeking in ExternalTextureManager (ETM)

After this CL, DVFP waits to complete flushing until all previously registered
frames have arrived.

Previously, when flushing, ETM recorded the difference between the number of
registered frames and the number of frames that had arrived on the
SurfaceTexture. (Note that ETM is flushed last in the chain, as flushing is
done backwards from FinalShaderProgramWrapper.) ETM then waited until that
number of frames arrived after the flush.

The normal flow is: MediaCodecVideoRenderer (MCVR) registers a new decoded
frame to DVFP in `processOutputBuffer()`, MCVR calls `codec.releaseOutputBuffer()`
to have MediaCodec render the frame, and then the frame arrives in DVFP's ETM.

However, there might be a discrepancy. When registering the frame, ETM records
the frame on the calling thread, ~instantly. Later, when the rendered frame
arrives, ETM records that a frame is available on the task executor thread
(commonly known as the GL thread). More specifically, when a frame arrives
in `onFrameAvailableListener`, ETM posts all subsequent processing to
the task executor. When seeking, the task executor is flushed as the first
step. A frame might have already arrived on ETM, and the processing of that
frame might already have been queued onto the task executor, only to be
discarded as a result of flushing the task executor. If this happens, the frame
is considered never to have arrived. This causes the app to freeze, because
ETM will wait for this frame to arrive before declaring that flushing has
completed.

PiperOrigin-RevId: 631524332
This commit is contained in:
claincly 2024-05-07 13:13:05 -07:00 committed by Copybara-Service
parent 6ac60c6dff
commit bef3d518d2
4 changed files with 227 additions and 31 deletions

View File

@ -647,10 +647,13 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
// Flush from the end of the GlShaderProgram pipeline up to the start. // Flush from the end of the GlShaderProgram pipeline up to the start.
CountDownLatch latch = new CountDownLatch(1); CountDownLatch latch = new CountDownLatch(1);
inputSwitcher.activeTextureManager().setOnFlushCompleteListener(latch::countDown); TextureManager textureManager = inputSwitcher.activeTextureManager();
textureManager.releaseAllRegisteredFrames();
textureManager.setOnFlushCompleteListener(latch::countDown);
videoFrameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush); videoFrameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush);
latch.await(); latch.await();
inputSwitcher.activeTextureManager().setOnFlushCompleteListener(null); textureManager.setOnFlushCompleteListener(null);
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
} }

View File

@ -31,9 +31,11 @@ import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Log; import androidx.media3.common.util.Log;
import androidx.media3.common.util.SystemClock;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import java.util.Queue; import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
@ -59,6 +61,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
*/ */
private static final long SURFACE_TEXTURE_TIMEOUT_MS = isRunningOnEmulator() ? 10_000 : 500; private static final long SURFACE_TEXTURE_TIMEOUT_MS = isRunningOnEmulator() ? 10_000 : 500;
// Wait delay between checking whether a registered frame arrives on the SurfaceTexture.
private static final long SURFACE_TEXTURE_WAIT_DELAY_MS = 10;
private final GlObjectsProvider glObjectsProvider; private final GlObjectsProvider glObjectsProvider;
private @MonotonicNonNull ExternalShaderProgram externalShaderProgram; private @MonotonicNonNull ExternalShaderProgram externalShaderProgram;
private final int externalTexId; private final int externalTexId;
@ -66,12 +71,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final SurfaceTexture surfaceTexture; private final SurfaceTexture surfaceTexture;
private final float[] textureTransformMatrix; private final float[] textureTransformMatrix;
private final Queue<FrameInfo> pendingFrames; private final Queue<FrameInfo> pendingFrames;
private final ScheduledExecutorService forceEndOfStreamExecutorService; private final ScheduledExecutorService scheduledExecutorService;
private final AtomicInteger externalShaderProgramInputCapacity; private final AtomicInteger externalShaderProgramInputCapacity;
private final boolean repeatLastRegisteredFrame; private final boolean repeatLastRegisteredFrame;
// Counts the frames that are registered before flush but are made available after flush.
private int numberOfFramesToDropOnBecomingAvailable;
private int availableFrameCount; private int availableFrameCount;
private boolean currentInputStreamEnded; private boolean currentInputStreamEnded;
@ -81,6 +84,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Nullable private Future<?> forceSignalEndOfStreamFuture; @Nullable private Future<?> forceSignalEndOfStreamFuture;
private boolean shouldRejectIncomingFrames; private boolean shouldRejectIncomingFrames;
// The first time trying to remove all frames from MediaCodec, used to escape repeated waiting for
// a frame to arrive on the SurfaceTexture.
private long firstTryToRemoveAllFramesTimeMs;
/** /**
* Creates a new instance. The caller's thread must have a current GL context. * Creates a new instance. The caller's thread must have a current GL context.
@ -113,7 +119,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
surfaceTexture = new SurfaceTexture(externalTexId); surfaceTexture = new SurfaceTexture(externalTexId);
textureTransformMatrix = new float[16]; textureTransformMatrix = new float[16];
pendingFrames = new ConcurrentLinkedQueue<>(); pendingFrames = new ConcurrentLinkedQueue<>();
forceEndOfStreamExecutorService = Util.newSingleThreadScheduledExecutor(TIMER_THREAD_NAME); scheduledExecutorService = Util.newSingleThreadScheduledExecutor(TIMER_THREAD_NAME);
externalShaderProgramInputCapacity = new AtomicInteger(); externalShaderProgramInputCapacity = new AtomicInteger();
surfaceTexture.setOnFrameAvailableListener( surfaceTexture.setOnFrameAvailableListener(
unused -> unused ->
@ -121,25 +127,38 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
() -> { () -> {
DebugTraceUtil.logEvent( DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_SURFACE_TEXTURE_INPUT, C.TIME_UNSET); DebugTraceUtil.EVENT_VFP_SURFACE_TEXTURE_INPUT, C.TIME_UNSET);
if (numberOfFramesToDropOnBecomingAvailable > 0) { if (shouldRejectIncomingFrames) {
numberOfFramesToDropOnBecomingAvailable--;
surfaceTexture.updateTexImage();
maybeExecuteAfterFlushTask();
} else if (shouldRejectIncomingFrames) {
surfaceTexture.updateTexImage(); surfaceTexture.updateTexImage();
Log.w( Log.w(
TAG, TAG,
"Dropping frame received on SurfaceTexture after forcing EOS: " "Dropping frame received on SurfaceTexture after forcing EOS: "
+ surfaceTexture.getTimestamp() / 1000); + surfaceTexture.getTimestamp() / 1000);
} else { return;
if (currentInputStreamEnded) {
restartForceSignalEndOfStreamTimer();
}
availableFrameCount++;
maybeQueueFrameToExternalShaderProgram();
} }
if (currentInputStreamEnded) {
restartForceSignalEndOfStreamTimer();
}
availableFrameCount++;
maybeQueueFrameToExternalShaderProgram();
})); }));
surface = new Surface(surfaceTexture); surface = new Surface(surfaceTexture);
firstTryToRemoveAllFramesTimeMs = C.TIME_UNSET;
}
@Override
public void releaseAllRegisteredFrames() {
// Blocks the calling thread until all the registered frames are received and released.
CountDownLatch countDownLatch = new CountDownLatch(1);
videoFrameProcessingTaskExecutor.submit(() -> releaseAllFramesFromMediaCodec(countDownLatch));
try {
countDownLatch.await();
} catch (InterruptedException e) {
// Not re-thrown to not crash frame processing. Frame process can likely continue even when
// not all rendered frames arrive.
Thread.currentThread().interrupt();
Log.w(TAG, "Interrupted when waiting for MediaCodec frames to arrive.");
}
} }
/** /**
@ -150,7 +169,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override @Override
public void setSamplingGlShaderProgram(GlShaderProgram samplingGlShaderProgram) { public void setSamplingGlShaderProgram(GlShaderProgram samplingGlShaderProgram) {
checkState(samplingGlShaderProgram instanceof ExternalShaderProgram); checkState(samplingGlShaderProgram instanceof ExternalShaderProgram);
externalShaderProgramInputCapacity.set(0);
this.externalShaderProgram = (ExternalShaderProgram) samplingGlShaderProgram; this.externalShaderProgram = (ExternalShaderProgram) samplingGlShaderProgram;
} }
@ -241,32 +259,22 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public void release() { public void release() {
surfaceTexture.release(); surfaceTexture.release();
surface.release(); surface.release();
forceEndOfStreamExecutorService.shutdownNow(); scheduledExecutorService.shutdownNow();
} }
@Override @Override
protected void flush() { protected void flush() {
// A frame that is registered before flush may arrive after flush.
numberOfFramesToDropOnBecomingAvailable = pendingFrames.size() - availableFrameCount;
removeAllSurfaceTextureFrames();
externalShaderProgramInputCapacity.set(0); externalShaderProgramInputCapacity.set(0);
currentFrame = null; currentFrame = null;
pendingFrames.clear(); pendingFrames.clear();
lastRegisteredFrame = null; lastRegisteredFrame = null;
maybeExecuteAfterFlushTask();
}
private void maybeExecuteAfterFlushTask() {
if (numberOfFramesToDropOnBecomingAvailable > 0) {
return;
}
super.flush(); super.flush();
} }
private void restartForceSignalEndOfStreamTimer() { private void restartForceSignalEndOfStreamTimer() {
cancelForceSignalEndOfStreamTimer(); cancelForceSignalEndOfStreamTimer();
forceSignalEndOfStreamFuture = forceSignalEndOfStreamFuture =
forceEndOfStreamExecutorService.schedule( scheduledExecutorService.schedule(
() -> videoFrameProcessingTaskExecutor.submit(this::forceSignalEndOfStream), () -> videoFrameProcessingTaskExecutor.submit(this::forceSignalEndOfStream),
SURFACE_TEXTURE_TIMEOUT_MS, SURFACE_TEXTURE_TIMEOUT_MS,
MILLISECONDS); MILLISECONDS);
@ -289,7 +297,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// Reset because there could be further input streams after the current one ends. // Reset because there could be further input streams after the current one ends.
currentInputStreamEnded = false; currentInputStreamEnded = false;
currentFrame = null; currentFrame = null;
pendingFrames.clear();
shouldRejectIncomingFrames = true; shouldRejectIncomingFrames = true;
// Frames could be made available while waiting for OpenGL to finish processing. That is, // Frames could be made available while waiting for OpenGL to finish processing. That is,
@ -297,13 +304,41 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// when there are frames available on the SurfaceTexture. This has only been observed on // when there are frames available on the SurfaceTexture. This has only been observed on
// emulators. // emulators.
removeAllSurfaceTextureFrames(); removeAllSurfaceTextureFrames();
pendingFrames.clear();
signalEndOfCurrentInputStream(); signalEndOfCurrentInputStream();
} }
/**
 * Releases all frames that were registered but may not yet have arrived on the SurfaceTexture,
 * counting down {@code latch} once done.
 *
 * <p>Runs on the video frame processing task executor (the GL thread). If registered frames are
 * still pending, this method re-schedules itself every {@code SURFACE_TEXTURE_WAIT_DELAY_MS}
 * until either all pending frames have been drained, or {@code SURFACE_TEXTURE_TIMEOUT_MS} has
 * elapsed since the first attempt — the timeout prevents blocking the waiting thread forever
 * when a registered frame never arrives (e.g. MediaCodec never rendered it).
 */
private void releaseAllFramesFromMediaCodec(CountDownLatch latch) {
removeAllSurfaceTextureFrames();
if (pendingFrames.isEmpty()
// Assumes a frame that is registered would not take longer than SURFACE_TEXTURE_TIMEOUT_MS
// to arrive, otherwise unblock the waiting thread.
|| (firstTryToRemoveAllFramesTimeMs != C.TIME_UNSET
&& SystemClock.DEFAULT.currentTimeMillis() - firstTryToRemoveAllFramesTimeMs
>= SURFACE_TEXTURE_TIMEOUT_MS)) {
// Done (or timed out): reset the retry clock and unblock the waiting thread.
firstTryToRemoveAllFramesTimeMs = C.TIME_UNSET;
latch.countDown();
return;
}
// First unsuccessful attempt: start the timeout clock.
if (firstTryToRemoveAllFramesTimeMs == C.TIME_UNSET) {
firstTryToRemoveAllFramesTimeMs = SystemClock.DEFAULT.currentTimeMillis();
}
// Poll again shortly; the work must hop back onto the task executor (GL thread) because
// scheduledExecutorService runs on its own timer thread.
Future<?> unusedFuture =
scheduledExecutorService.schedule(
() ->
videoFrameProcessingTaskExecutor.submit(
() -> releaseAllFramesFromMediaCodec(latch)),
SURFACE_TEXTURE_WAIT_DELAY_MS,
MILLISECONDS);
}
private void removeAllSurfaceTextureFrames() { private void removeAllSurfaceTextureFrames() {
while (availableFrameCount > 0) { while (availableFrameCount > 0) {
availableFrameCount--; availableFrameCount--;
surfaceTexture.updateTexImage(); surfaceTexture.updateTexImage();
pendingFrames.remove();
} }
} }

View File

@ -164,4 +164,7 @@ import androidx.media3.common.util.TimestampIterator;
} }
} }
} }
/**
 * Releases all previously {@linkplain #registerInputFrame(FrameInfo) registered} frames.
 *
 * <p>The default implementation is a no-op.
 */
public void releaseAllRegisteredFrames() {}
} }

View File

@ -0,0 +1,155 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer.mh.performance;
import static androidx.media3.common.util.Assertions.checkState;
import android.app.Instrumentation;
import android.content.Context;
import android.os.Handler;
import android.view.SurfaceView;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.exoplayer.DefaultRenderersFactory;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.mediacodec.MediaCodecAdapter;
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
import androidx.media3.exoplayer.util.EventLogger;
import androidx.media3.exoplayer.video.MediaCodecVideoRenderer;
import androidx.media3.exoplayer.video.VideoRendererEventListener;
import androidx.media3.transformer.PlayerTestListener;
import androidx.media3.transformer.SurfaceTestActivity;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.rules.ActivityScenarioRule;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Test for seeking when using {@link ExoPlayer#setVideoEffects(List)}. */
@RunWith(AndroidJUnit4.class)
public final class ExoplayerEffectPlaybackSeekTest {

  private static final String MP4_ASSET = "asset:///media/mp4/sample.mp4";

  // This timeout is chosen to be longer than ExternalTextureManager.SURFACE_TEXTURE_TIMEOUT_MS,
  // which is raised to 10s on emulators, so the frame-arrival timeout can fire before the test
  // times out.
  private static final long TEST_TIMEOUT_MS = 20_000;

  @Rule
  public ActivityScenarioRule<SurfaceTestActivity> rule =
      new ActivityScenarioRule<>(SurfaceTestActivity.class);

  private final Instrumentation instrumentation = InstrumentationRegistry.getInstrumentation();
  private final Context context = instrumentation.getContext().getApplicationContext();
  private @MonotonicNonNull ExoPlayer player;
  private SurfaceView surfaceView;

  @Before
  public void setUp() {
    rule.getScenario().onActivity(activity -> surfaceView = activity.getSurfaceView());
  }

  @After
  public void tearDown() {
    rule.getScenario().close();
    // Player must be released on the main (application) thread.
    instrumentation.runOnMainSync(
        () -> {
          if (player != null) {
            player.release();
          }
        });
  }

  @Test
  public void seekTo_frameNotRenderedToSurfaceTexture_unblocksFrameProcessing() throws Exception {
    // The test aims to test the scenario that
    // 1. MCVR (MediaCodecVideoRenderer) registered a frame to DVFP(DefaultVideoFrameProcessor)'s
    // ETM (ExternalTextureManager)
    // 2. MCVR then have MediaCodec render that frame to DVFP
    // a. When ETM receives the frame available callback, it posts the handling of the frame
    // onto the GL thread
    // 3. The player seeks, MCVR flushes the DVFP. This subsequently flushes the GL thread
    // This test ensures playback continues regardless if the frame handling logic (2.a) is run or
    // not. The test overrides the video renderer so that a frame is registered to DVFP, but not
    // rendered by MediaCodec.
    PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
    // The n-th rendered frame is registered to DVFP but deliberately never released to
    // MediaCodec, creating the registered-but-never-arrived discrepancy under test.
    int frameIndexToSkip = 15;
    ConditionVariable frameSkippedCondition = new ConditionVariable();
    MediaCodecVideoRenderer videoRenderer =
        new MediaCodecVideoRenderer(context, MediaCodecSelector.DEFAULT) {
          private int numberOfFramesRendered;

          // Overriding the V21 variant is sufficient as these tests don't run below API 26.
          @Override
          protected void renderOutputBufferV21(
              MediaCodecAdapter codec, int index, long presentationTimeUs, long releaseTimeNs) {
            numberOfFramesRendered++;
            if (numberOfFramesRendered == frameIndexToSkip) {
              // Skip rendering this frame: it stays registered in ETM but never arrives on the
              // SurfaceTexture.
              frameSkippedCondition.open();
              return;
            }
            super.renderOutputBufferV21(codec, index, presentationTimeUs, releaseTimeNs);
          }
        };
    instrumentation.runOnMainSync(
        () -> {
          player =
              new ExoPlayer.Builder(ApplicationProvider.getApplicationContext())
                  .setRenderersFactory(
                      new DefaultRenderersFactory(context) {
                        @Override
                        protected void buildVideoRenderers(
                            Context context,
                            @ExtensionRendererMode int extensionRendererMode,
                            MediaCodecSelector mediaCodecSelector,
                            boolean enableDecoderFallback,
                            Handler eventHandler,
                            VideoRendererEventListener eventListener,
                            long allowedVideoJoiningTimeMs,
                            ArrayList<Renderer> out) {
                          // Install only the frame-skipping renderer defined above.
                          out.add(videoRenderer);
                        }
                      })
                  .build();
          player.setPlayWhenReady(true);
          player.setVideoSurfaceView(surfaceView);
          // Use an empty list to enable effect playback.
          player.setVideoEffects(ImmutableList.of());
          // Adding an EventLogger to use its log output in case the test fails.
          player.addAnalyticsListener(new EventLogger());
          player.addListener(listener);
          player.setMediaItem(MediaItem.fromUri(MP4_ASSET));
          player.prepare();
        });
    // Wait until the frame is skipped; block() returning true confirms the skip happened within
    // the timeout.
    checkState(frameSkippedCondition.block(TEST_TIMEOUT_MS));
    // Seek with one frame registered-but-unarrived; playback must still reach the ended state
    // rather than freezing in ETM's flush wait.
    instrumentation.runOnMainSync(() -> player.seekTo(0));
    listener.waitUntilPlayerEnded();
  }
}