Compositor: Use timestamps to release frames.

Also, implement back-pressure so that the compositor never requests more output
textures than its texture pool holds (a simplified sketch of this scheme follows the commit metadata below).

PiperOrigin-RevId: 548179800
Author: huangdarwin, 2023-07-14 19:37:09 +01:00; committed by Ian Baker
parent 8c4aa6b75d
commit 0c29dacde3
4 changed files with 76 additions and 53 deletions
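
Below is a minimal, self-contained sketch of the timestamp-based release and back-pressure scheme this change introduces. It is not the Media3 API: the class and method names (TexturePoolBackPressureSketch, onFrameComposited, and so on) are hypothetical stand-ins, and a plain counter plus a timestamp queue stand in for GL textures and sync fences.

import java.util.ArrayDeque;
import java.util.Queue;

/**
 * Hypothetical sketch of timestamp-keyed frame release with back-pressure.
 * A counter and a timestamp queue stand in for GL textures and fences; this is not the Media3 API.
 */
final class TexturePoolBackPressureSketch {
  private final int capacity; // Total number of output textures in the pool.
  private int freeTextureCount; // Textures currently available for compositing.
  private final Queue<Long> outputTextureTimestampsUs = new ArrayDeque<>(); // Oldest first.

  TexturePoolBackPressureSketch(int capacity) {
    this.capacity = capacity;
    this.freeTextureCount = capacity;
  }

  /** Back-pressure: compositing may only proceed while a pool texture is free. */
  boolean isReadyToComposite() {
    return freeTextureCount > 0;
  }

  /** Records that one pool texture was used for a frame with the given timestamp. */
  void onFrameComposited(long presentationTimeUs) {
    if (!isReadyToComposite()) {
      throw new IllegalStateException("No free output textures");
    }
    freeTextureCount--;
    outputTextureTimestampsUs.add(presentationTimeUs);
  }

  /** Frees every pooled texture whose frame timestamp is at or before the given time. */
  void releaseOutputFrame(long presentationTimeUs) {
    while (!outputTextureTimestampsUs.isEmpty()
        && outputTextureTimestampsUs.peek() <= presentationTimeUs) {
      outputTextureTimestampsUs.remove();
      freeTextureCount++;
    }
  }
}

In the actual diff below, isReadyToComposite() additionally requires a free texture in outputTexturePool before compositing, and releaseOutputFrameInternal() walks outputTextureTimestamps, freeing each texture and deleting its sync object up to the released timestamp, then calls maybeComposite() to resume any work that back-pressure had blocked.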

View File

@@ -401,7 +401,11 @@ public final class GlUtil {
     return syncObject;
   }
 
-  /** Releases the underlying native object. */
+  /**
+   * Deletes the underlying native object.
+   *
+   * <p>The {@code syncObject} must not be used after deletion.
+   */
   public static void deleteSyncObject(long syncObject) throws GlException {
     GLES30.glDeleteSync(syncObject);
     checkGlError();

View File

@@ -213,6 +213,20 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     throw new UnsupportedOperationException();
   }
 
+  private void releaseOutputFrame(long presentationTimeUs) {
+    videoFrameProcessingTaskExecutor.submit(() -> releaseOutputFrameInternal(presentationTimeUs));
+  }
+
+  private void releaseOutputFrameInternal(long presentationTimeUs) throws GlUtil.GlException {
+    while (outputTexturePool.freeTextureCount() < outputTexturePool.capacity()
+        && checkNotNull(outputTextureTimestamps.peek()) <= presentationTimeUs) {
+      outputTexturePool.freeTexture();
+      outputTextureTimestamps.remove();
+      GlUtil.deleteSyncObject(syncObjects.remove());
+      maybeOnReadyToAcceptInputFrame();
+    }
+  }
+
   /**
    * Sets the list of {@link GlMatrixTransformation GlMatrixTransformations} and list of {@link
    * RgbMatrix RgbMatrices} to apply to the next {@linkplain #queueInputFrame queued} frame.
@@ -229,20 +243,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     matrixTransformationsChanged = true;
   }
 
-  public void releaseOutputFrame(long presentationTimeUs) {
-    videoFrameProcessingTaskExecutor.submit(() -> releaseOutputFrameInternal(presentationTimeUs));
-  }
-
-  private void releaseOutputFrameInternal(long presentationTimeUs) throws GlUtil.GlException {
-    while (outputTexturePool.freeTextureCount() < outputTexturePool.capacity()
-        && checkNotNull(outputTextureTimestamps.peek()) <= presentationTimeUs) {
-      outputTexturePool.freeTexture();
-      outputTextureTimestamps.remove();
-      GlUtil.deleteSyncObject(syncObjects.remove());
-      maybeOnReadyToAcceptInputFrame();
-    }
-  }
-
   @Override
   public void flush() {
     // Drops all frames that aren't rendered yet.

View File

@@ -21,6 +21,7 @@ import static androidx.media3.common.util.Assertions.checkState;
 import android.content.Context;
 import android.opengl.EGLContext;
 import android.opengl.EGLDisplay;
+import android.opengl.EGLSurface;
 import android.opengl.GLES20;
 import androidx.annotation.GuardedBy;
 import androidx.annotation.IntRange;
@@ -52,6 +53,7 @@ public final class VideoCompositor {
   // * Handle mismatched timestamps
   // * Before allowing customization of this class, add an interface, and rename this class to
   //   DefaultCompositor.
+  // * Use a lock to synchronize inputFrameInfos more narrowly, to reduce blocking.
 
   /** Listener for errors. */
   public interface ErrorListener {
@@ -79,11 +81,13 @@ public final class VideoCompositor {
   private final List<Queue<InputFrameInfo>> inputFrameInfos;
   private final TexturePool outputTexturePool;
+  private final Queue<Long> outputTextureTimestamps; // Synchronized with outputTexturePool.
+  private final Queue<Long> syncObjects; // Synchronized with outputTexturePool.
 
   // Only used on the GL Thread.
   private @MonotonicNonNull EGLContext eglContext;
   private @MonotonicNonNull EGLDisplay eglDisplay;
   private @MonotonicNonNull GlProgram glProgram;
-  private long syncObject;
+  private @MonotonicNonNull EGLSurface placeholderEglSurface;
 
   /**
    * Creates an instance.
@@ -105,6 +109,8 @@ public final class VideoCompositor {
     inputFrameInfos = new ArrayList<>();
     outputTexturePool =
         new TexturePool(/* useHighPrecisionColorComponents= */ false, textureOutputCapacity);
+    outputTextureTimestamps = new ArrayDeque<>(textureOutputCapacity);
+    syncObjects = new ArrayDeque<>(textureOutputCapacity);
 
     boolean ownsExecutor = executorService == null;
     ExecutorService instanceExecutorService =
@ -142,13 +148,7 @@ public final class VideoCompositor {
InputFrameInfo inputFrameInfo = InputFrameInfo inputFrameInfo =
new InputFrameInfo(inputTexture, presentationTimeUs, releaseTextureCallback); new InputFrameInfo(inputTexture, presentationTimeUs, releaseTextureCallback);
checkNotNull(inputFrameInfos.get(inputId)).add(inputFrameInfo); checkNotNull(inputFrameInfos.get(inputId)).add(inputFrameInfo);
videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
videoFrameProcessingTaskExecutor.submit(
() -> {
if (isReadyToComposite()) {
compositeToOutputTexture();
}
});
} }
public void release() { public void release() {
@@ -162,25 +162,19 @@
   // Below methods must be called on the GL thread.
   private void setupGlObjects() throws GlUtil.GlException {
-    EGLDisplay eglDisplay = GlUtil.getDefaultEglDisplay();
-    EGLContext eglContext =
+    eglDisplay = GlUtil.getDefaultEglDisplay();
+    eglContext =
         glObjectsProvider.createEglContext(
             eglDisplay, /* openGlVersion= */ 2, GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888);
-    glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay);
+    placeholderEglSurface =
+        glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay);
   }
 
-  private synchronized boolean isReadyToComposite() {
-    // TODO: b/262694346 - Use timestamps to determine when to composite instead of number of
-    // frames.
-    for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
-      if (checkNotNull(inputFrameInfos.get(inputId)).isEmpty()) {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  private synchronized void compositeToOutputTexture() throws VideoFrameProcessingException {
+  private synchronized void maybeComposite() throws VideoFrameProcessingException {
+    if (!isReadyToComposite()) {
+      return;
+    }
+
     List<InputFrameInfo> framesToComposite = new ArrayList<>();
     for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
       framesToComposite.add(checkNotNull(inputFrameInfos.get(inputId)).remove());
@@ -199,27 +193,55 @@
       outputTexturePool.ensureConfigured(
           glObjectsProvider, inputFrame1.texture.width, inputFrame1.texture.height);
       GlTextureInfo outputTexture = outputTexturePool.useTexture();
+      long outputPresentationTimestampUs = framesToComposite.get(0).presentationTimeUs;
+      outputTextureTimestamps.add(outputPresentationTimestampUs);
       drawFrame(inputFrame1.texture, inputFrame2.texture, outputTexture);
-      syncObject = GlUtil.createGlSyncFence();
+      long syncObject = GlUtil.createGlSyncFence();
+      syncObjects.add(syncObject);
+      textureOutputListener.onTextureRendered(
+          outputTexture,
+          /* presentationTimeUs= */ framesToComposite.get(0).presentationTimeUs,
+          this::releaseOutputFrame,
+          syncObject);
 
       for (int i = 0; i < framesToComposite.size(); i++) {
         InputFrameInfo inputFrameInfo = framesToComposite.get(i);
         inputFrameInfo.releaseCallback.release(inputFrameInfo.presentationTimeUs);
       }
-
-      // TODO: b/262694346 - Use presentationTimeUs here for freeing textures.
-      textureOutputListener.onTextureRendered(
-          checkNotNull(outputTexture),
-          /* presentationTimeUs= */ framesToComposite.get(0).presentationTimeUs,
-          (presentationTimeUs) ->
-              videoFrameProcessingTaskExecutor.submit(outputTexturePool::freeTexture),
-          syncObject);
     } catch (GlUtil.GlException e) {
       throw VideoFrameProcessingException.from(e);
     }
   }
 
+  private synchronized boolean isReadyToComposite() {
+    if (outputTexturePool.freeTextureCount() == 0) {
+      return false;
+    }
+    // TODO: b/262694346 - Use timestamps to determine when to composite instead of number of
+    // frames.
+    for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
+      if (checkNotNull(inputFrameInfos.get(inputId)).isEmpty()) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  private void releaseOutputFrame(long presentationTimeUs) {
+    videoFrameProcessingTaskExecutor.submit(() -> releaseOutputFrameInternal(presentationTimeUs));
+  }
+
+  private synchronized void releaseOutputFrameInternal(long presentationTimeUs)
+      throws VideoFrameProcessingException, GlUtil.GlException {
+    while (outputTexturePool.freeTextureCount() < outputTexturePool.capacity()
+        && checkNotNull(outputTextureTimestamps.peek()) <= presentationTimeUs) {
+      outputTexturePool.freeTexture();
+      outputTextureTimestamps.remove();
+      GlUtil.deleteSyncObject(syncObjects.remove());
+    }
+    maybeComposite();
+  }
+
   private void ensureGlProgramConfigured() throws VideoFrameProcessingException {
     if (glProgram != null) {
       return;
@@ -262,10 +284,11 @@
   private void releaseGlObjects() {
     try {
       outputTexturePool.deleteAllTextures();
+      GlUtil.destroyEglSurface(eglDisplay, placeholderEglSurface);
       if (glProgram != null) {
         glProgram.delete();
       }
-    } catch (Exception e) {
+    } catch (GlUtil.GlException e) {
       Log.e(TAG, "Error releasing GL resources", e);
     } finally {
       try {

View File

@@ -96,9 +96,7 @@ public final class VideoCompositorPixelTest {
             testId,
             (outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> {
               try {
-                if (useSharedExecutor) {
-                  GlUtil.deleteSyncObject(syncObject);
-                } else {
+                if (!useSharedExecutor) {
                   GlUtil.awaitSyncObject(syncObject);
                 }
                 compositedOutputBitmap.set(
@@ -146,9 +144,7 @@
             testId,
             (outputTexture, presentationTimeUs, releaseOutputTextureCallback, syncObject) -> {
               try {
-                if (useSharedExecutor) {
-                  GlUtil.deleteSyncObject(syncObject);
-                } else {
+                if (!useSharedExecutor) {
                   GlUtil.awaitSyncObject(syncObject);
                 }
                 if (compositedFirstOutputBitmap.get() == null) {