Report flushing completed after all pending frames are decoded.

With the current ExtTexMgr,

it can happen that

- `x` frames are registered, but haven't arrived yet
- flush
  - need to drop `x` frames when they arrive on SurfaceTexture
  - status is reset to 0 pending, 0 available, drop `x` when frames arrive
- register one frame
  - status is set to 1 pending, 0 available, drop `x` when frames arrive
- flush
  - now the number of frames to drop is reset to `pending - available = 1`
  - but it should be `x+1`

This CL solves the issue by reporting flush completion (by running the afterFlushTask) only after all frames that were pending before flush() was called are accounted for.

PiperOrigin-RevId: 506310671
This commit is contained in:
claincly 2023-02-01 15:29:32 +00:00 committed by christosts
parent 83bc474f3c
commit 77cc25242b

View File

@ -58,6 +58,7 @@ import java.util.concurrent.atomic.AtomicInteger;
// Set to null on any thread. Read and set to non-null on the GL thread only.
@Nullable private volatile FrameInfo currentFrame;
// TODO(b/238302341) Remove the use of after flush task, block the calling thread instead.
@Nullable private volatile FrameProcessingTask onFlushCompleteTask;
private long previousStreamOffsetUs;
@ -97,6 +98,7 @@ import java.util.concurrent.atomic.AtomicInteger;
if (numberOfFramesToDropOnBecomingAvailable > 0) {
numberOfFramesToDropOnBecomingAvailable--;
surfaceTexture.updateTexImage();
maybeExecuteAfterFlushTask();
} else {
availableFrameCount++;
maybeQueueFrameToExternalTextureProcessor();
@ -184,10 +186,14 @@ import java.util.concurrent.atomic.AtomicInteger;
externalTextureProcessorInputCapacity.set(0);
currentFrame = null;
pendingFrames.clear();
maybeExecuteAfterFlushTask();
}
if (onFlushCompleteTask != null) {
frameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
private void maybeExecuteAfterFlushTask() {
if (onFlushCompleteTask == null || numberOfFramesToDropOnBecomingAvailable > 0) {
return;
}
frameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
}
@WorkerThread