Allow frame release to be controlled outside FrameProcessor.

Adds a method to FrameProcessor.Listener that is called when an
output frame becomes available, and a releaseOutputFrame method to
FrameProcessor that allows the caller to trigger release of the
oldest available output frame at a given timestamp. Late frames
and frames with unset release times are dropped in the
FinalMatrixTransformationProcessorWrapper.

Multiple output frames can become available before being
released if the penultimate GlTextureProcessor is capable of producing
multiple output frames. Processing continues while waiting for
releaseOutputFrame to be called. Frame release tasks are prioritized
over other tasks.

PiperOrigin-RevId: 468473072
This commit is contained in:
Googler 2022-08-18 16:10:48 +00:00 committed by Marc Baechinger
parent 244d38cf0e
commit a5d7fdcab5
6 changed files with 590 additions and 117 deletions

View File

@ -47,6 +47,10 @@ public interface FrameProcessor {
* @param effects The {@link Effect} instances to apply to each frame.
* @param debugViewProvider A {@link DebugViewProvider}.
* @param colorInfo The {@link ColorInfo} for input and output frames.
* @param releaseFramesAutomatically If {@code true}, the {@link FrameProcessor} will release
* output frames to the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface}
* automatically as they become available. If {@code false}, the {@link FrameProcessor} will
* wait to release each frame until {@link #releaseOutputFrame(long)} is called.
* @return A new instance.
* @throws FrameProcessingException If a problem occurs while creating the {@link
* FrameProcessor}.
@ -56,7 +60,8 @@ public interface FrameProcessor {
Listener listener,
List<Effect> effects,
DebugViewProvider debugViewProvider,
ColorInfo colorInfo)
ColorInfo colorInfo,
boolean releaseFramesAutomatically)
throws FrameProcessingException;
}
@ -75,6 +80,13 @@ public interface FrameProcessor {
*/
void onOutputSizeChanged(int width, int height);
/**
* Called when an output frame with the given {@code presentationTimeNs} becomes available.
*
* @param presentationTimeNs The presentation time of the frame, in nanoseconds.
*/
void onOutputFrameAvailable(long presentationTimeNs);
/**
* Called when an exception occurs during asynchronous frame processing.
*
@ -137,6 +149,20 @@ public interface FrameProcessor {
*/
void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo);
/**
* Releases the oldest unreleased output frame that has become {@linkplain
* Listener#onOutputFrameAvailable(long) available} at the given {@code releaseTimeNs}.
*
* <p>This method must only be called if {@code releaseFramesAutomatically} was set to {@code
* false} using the {@link Factory} and should be called exactly once for each frame that becomes
* {@linkplain Listener#onOutputFrameAvailable(long) available}.
*
* @param releaseTimeNs The release time to use for the frame, in nanoseconds. Use {@link
* C#TIME_UNSET} to drop the frame. If {@code releaseTimeNs} is after {@link
* System#nanoTime()} at the time of the release, the frame is also dropped.
*/
void releaseOutputFrame(long releaseTimeNs);
/**
* Informs the {@code FrameProcessor} that no further input frames should be accepted.
*

View File

@ -0,0 +1,361 @@
/*
* Copyright 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import android.graphics.PixelFormat;
import android.media.Image;
import android.media.ImageReader;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Util;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Tests for frame release in {@link GlEffectsFrameProcessor}.
 *
 * <p>Covers both release modes: automatic release (frames are released to the output surface as
 * soon as they become available) and controlled release (the caller triggers each release via
 * {@code releaseOutputFrame}). Output release timestamps are observed through an {@link
 * ImageReader} attached as the output surface.
 */
@RunWith(AndroidJUnit4.class)
public final class GlEffectsFrameProcessorFrameReleaseTest {

  // Dimensions of the blank frames produced by BlankFrameProducer.
  private static final int WIDTH = 200;
  private static final int HEIGHT = 100;
  // How long each test sleeps to let asynchronous frame processing complete.
  private static final long FRAME_PROCESSING_WAIT_MS = 5000L;
  // Unit-conversion multipliers used when deriving nanosecond timestamps.
  private static final long MILLIS_TO_NANOS = 1_000_000L;
  private static final long MICROS_TO_NANOS = 1000L;

  // Captures the first asynchronous processing error, asserted null at the end of each test.
  private final AtomicReference<FrameProcessingException> frameProcessingException =
      new AtomicReference<>();
  // Release timestamps observed on the output surface (via Image#getTimestamp), in order.
  private final Queue<Long> outputReleaseTimesNs = new ConcurrentLinkedQueue<>();

  private @MonotonicNonNull GlEffectsFrameProcessor glEffectsFrameProcessor;
  // Set from the GL thread when BlankFrameProducer receives its output listener; volatile so the
  // test thread can observe it. Running this task pushes all blank frames downstream.
  private volatile @MonotonicNonNull Runnable produceBlankFramesTask;

  @After
  public void release() {
    if (glEffectsFrameProcessor != null) {
      glEffectsFrameProcessor.release();
    }
  }

  @Test
  public void automaticFrameRelease_withOneFrame_reusesInputTimestamp() throws Exception {
    long originalPresentationTimeUs = 1234;
    AtomicLong actualPresentationTimeNs = new AtomicLong();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
        /* onFrameAvailableListener= */ actualPresentationTimeNs::set,
        /* releaseFramesAutomatically= */ true);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimeNs.get())
        .isEqualTo(MICROS_TO_NANOS * originalPresentationTimeUs);
    // In automatic mode the release time equals the frame's presentation time.
    assertThat(outputReleaseTimesNs).containsExactly(MICROS_TO_NANOS * originalPresentationTimeUs);
  }

  @Test
  public void automaticFrameRelease_withThreeFrames_reusesInputTimestamps() throws Exception {
    long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
    ArrayList<Long> actualPresentationTimesNs = new ArrayList<>();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        originalPresentationTimesUs,
        /* onFrameAvailableListener= */ actualPresentationTimesNs::add,
        /* releaseFramesAutomatically= */ true);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimesNs)
        .containsExactly(
            MICROS_TO_NANOS * originalPresentationTimesUs[0],
            MICROS_TO_NANOS * originalPresentationTimesUs[1],
            MICROS_TO_NANOS * originalPresentationTimesUs[2])
        .inOrder();
    // Release times must mirror the presentation times, in the same order.
    assertThat(outputReleaseTimesNs).containsExactlyElementsIn(actualPresentationTimesNs).inOrder();
  }

  @Test
  public void controlledFrameRelease_withOneFrame_usesGivenTimestamp() throws Exception {
    long originalPresentationTimeUs = 1234;
    // Release time is placed safely after the test's wait window so the frame is not late.
    long releaseTimesNs = System.nanoTime() + MILLIS_TO_NANOS * FRAME_PROCESSING_WAIT_MS + 345678;
    AtomicLong actualPresentationTimeNs = new AtomicLong();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
        /* onFrameAvailableListener= */ presentationTimeNs -> {
          actualPresentationTimeNs.set(presentationTimeNs);
          checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimesNs);
        },
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimeNs.get())
        .isEqualTo(MICROS_TO_NANOS * originalPresentationTimeUs);
    // The caller-chosen release time, not the presentation time, reaches the output surface.
    assertThat(outputReleaseTimesNs).containsExactly(releaseTimesNs);
  }

  @Test
  public void controlledFrameRelease_withLateFrame_dropsFrame() throws Exception {
    long originalPresentationTimeUs = 1234;
    // A release time in the past makes the frame "late", so it must be dropped.
    long releaseTimeBeforeCurrentTimeNs = System.nanoTime() - 345678;
    AtomicLong actualPresentationTimeNs = new AtomicLong();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
        /* onFrameAvailableListener= */ presentationTimeNs -> {
          actualPresentationTimeNs.set(presentationTimeNs);
          checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimeBeforeCurrentTimeNs);
        },
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    // The frame still becomes available to the listener...
    assertThat(actualPresentationTimeNs.get())
        .isEqualTo(MICROS_TO_NANOS * originalPresentationTimeUs);
    // ...but nothing is rendered to the output surface.
    assertThat(outputReleaseTimesNs).isEmpty();
  }

  @Test
  public void controlledFrameRelease_withUnsetReleaseTime_dropsFrame() throws Exception {
    long originalPresentationTimeUs = 1234;
    AtomicLong actualPresentationTimeNs = new AtomicLong();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
        /* onFrameAvailableListener= */ presentationTimeNs -> {
          actualPresentationTimeNs.set(presentationTimeNs);
          // C.TIME_UNSET explicitly requests that the frame be dropped.
          checkNotNull(glEffectsFrameProcessor)
              .releaseOutputFrame(/* releaseTimeNs= */ C.TIME_UNSET);
        },
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimeNs.get())
        .isEqualTo(MICROS_TO_NANOS * originalPresentationTimeUs);
    assertThat(outputReleaseTimesNs).isEmpty();
  }

  @Test
  public void controlledFrameRelease_withThreeIndividualFrames_usesGivenTimestamps()
      throws Exception {
    long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
    // All release times are after the wait window so none of the frames is considered late.
    long offsetNs = System.nanoTime() + MILLIS_TO_NANOS * FRAME_PROCESSING_WAIT_MS;
    long[] releaseTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
    ArrayList<Long> actualPresentationTimesNs = new ArrayList<>();
    AtomicInteger frameIndex = new AtomicInteger();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ originalPresentationTimesUs,
        /* onFrameAvailableListener= */ presentationTimeNs -> {
          actualPresentationTimesNs.add(presentationTimeNs);
          // Release each frame as soon as it becomes available, one release per callback.
          checkNotNull(glEffectsFrameProcessor)
              .releaseOutputFrame(releaseTimesNs[frameIndex.getAndIncrement()]);
        },
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimesNs)
        .containsExactly(
            MICROS_TO_NANOS * originalPresentationTimesUs[0],
            MICROS_TO_NANOS * originalPresentationTimesUs[1],
            MICROS_TO_NANOS * originalPresentationTimesUs[2])
        .inOrder();
    // Exactly one release per available frame.
    assertThat(frameIndex.get()).isEqualTo(originalPresentationTimesUs.length);
    assertThat(outputReleaseTimesNs)
        .containsExactly(releaseTimesNs[0], releaseTimesNs[1], releaseTimesNs[2])
        .inOrder();
  }

  @Test
  public void controlledFrameRelease_withThreeFramesAtOnce_usesGivenTimestamps() throws Exception {
    long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
    // Doubled offset: this test waits twice (once before releasing, once after), so release times
    // must lie beyond both wait windows to avoid being treated as late.
    long offsetNs = System.nanoTime() + MILLIS_TO_NANOS * 2 * FRAME_PROCESSING_WAIT_MS;
    long[] releaseTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
    ArrayList<Long> actualPresentationTimesNs = new ArrayList<>();
    setupGlEffectsFrameProcessorWithBlankFrameProducer(
        /* inputPresentationTimesUs= */ originalPresentationTimesUs,
        /* onFrameAvailableListener= */ actualPresentationTimesNs::add,
        /* releaseFramesAutomatically= */ false);

    checkNotNull(produceBlankFramesTask).run();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);
    // All three frames are available by now; release them back-to-back.
    glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[0]);
    glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[1]);
    glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[2]);
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);

    assertThat(frameProcessingException.get()).isNull();
    assertThat(actualPresentationTimesNs)
        .containsExactly(
            MICROS_TO_NANOS * originalPresentationTimesUs[0],
            MICROS_TO_NANOS * originalPresentationTimesUs[1],
            MICROS_TO_NANOS * originalPresentationTimesUs[2])
        .inOrder();
    assertThat(outputReleaseTimesNs)
        .containsExactly(releaseTimesNs[0], releaseTimesNs[1], releaseTimesNs[2])
        .inOrder();
  }

  /** Test-local callback invoked when the frame processor reports an available output frame. */
  private interface OnFrameAvailableListener {
    void onFrameAvailable(long presentationTimeNs);
  }

  /**
   * Creates a {@link GlEffectsFrameProcessor} whose only effect is a {@link BlankFrameProducer}
   * emitting frames at {@code inputPresentationTimesUs}, wires its output to an {@link ImageReader}
   * that records release timestamps into {@link #outputReleaseTimesNs}, and forwards
   * frame-available callbacks to {@code onFrameAvailableListener}.
   */
  @EnsuresNonNull("glEffectsFrameProcessor")
  private void setupGlEffectsFrameProcessorWithBlankFrameProducer(
      long[] inputPresentationTimesUs,
      OnFrameAvailableListener onFrameAvailableListener,
      boolean releaseFramesAutomatically)
      throws Exception {
    glEffectsFrameProcessor =
        checkNotNull(
            new GlEffectsFrameProcessor.Factory()
                .create(
                    getApplicationContext(),
                    new FrameProcessor.Listener() {
                      @Override
                      public void onOutputSizeChanged(int width, int height) {
                        // Create the output surface lazily, once the output size is known. One
                        // image slot per input frame so the reader never blocks rendering.
                        ImageReader outputImageReader =
                            ImageReader.newInstance(
                                width,
                                height,
                                PixelFormat.RGBA_8888,
                                /* maxImages= */ inputPresentationTimesUs.length);
                        checkNotNull(glEffectsFrameProcessor)
                            .setOutputSurfaceInfo(
                                new SurfaceInfo(outputImageReader.getSurface(), width, height));
                        outputImageReader.setOnImageAvailableListener(
                            imageReader -> {
                              // Record the timestamp each rendered image was released with.
                              try (Image image = imageReader.acquireNextImage()) {
                                outputReleaseTimesNs.add(image.getTimestamp());
                              }
                            },
                            Util.createHandlerForCurrentOrMainLooper());
                      }

                      @Override
                      public void onOutputFrameAvailable(long presentationTimeNs) {
                        onFrameAvailableListener.onFrameAvailable(presentationTimeNs);
                      }

                      @Override
                      public void onFrameProcessingError(FrameProcessingException exception) {
                        frameProcessingException.set(exception);
                      }

                      @Override
                      public void onFrameProcessingEnded() {}
                    },
                    ImmutableList.of(
                        (GlEffect)
                            (context, useHdr) ->
                                new BlankFrameProducer(inputPresentationTimesUs, useHdr)),
                    DebugViewProvider.NONE,
                    ColorInfo.SDR_BT709_LIMITED,
                    releaseFramesAutomatically));
    glEffectsFrameProcessor.setInputFrameInfo(
        new FrameInfo(WIDTH, HEIGHT, /* pixelWidthHeightRatio= */ 1, /* streamOffsetUs= */ 0));
    // A frame needs to be registered despite not queuing any external input to ensure that the
    // frame processor knows about the stream offset.
    glEffectsFrameProcessor.registerInputFrame();
  }

  /**
   * Produces blank frames with the given timestamps.
   *
   * <p>Acts as the first {@link GlTextureProcessor} in the chain, bypassing external input: instead
   * of consuming queued frames it pushes one pre-cleared texture per timestamp to its output
   * listener when {@link #produceBlankFramesTask} is run.
   */
  private final class BlankFrameProducer implements GlTextureProcessor {

    private final TextureInfo blankTexture;
    private final long[] presentationTimesUs;

    // NOTE(review): texture creation here assumes a current GL context — presumably the GlEffect
    // factory is invoked on the frame processor's GL thread; confirm against GlEffectsFrameProcessor.
    public BlankFrameProducer(long[] presentationTimesUs, boolean useHdr)
        throws FrameProcessingException {
      this.presentationTimesUs = presentationTimesUs;
      try {
        int texId = GlUtil.createTexture(WIDTH, HEIGHT, useHdr);
        int fboId = GlUtil.createFboForTexture(texId);
        blankTexture = new TextureInfo(texId, fboId, WIDTH, HEIGHT);
        // Clear the texture once; the same blank texture is reused for every output frame.
        GlUtil.focusFramebufferUsingCurrentContext(fboId, WIDTH, HEIGHT);
        GlUtil.clearOutputFrame();
      } catch (GlUtil.GlException e) {
        throw new FrameProcessingException(e);
      }
    }

    @Override
    public void setInputListener(InputListener inputListener) {}

    @Override
    public void setOutputListener(OutputListener outputListener) {
      // Defer frame production: the test triggers it explicitly via produceBlankFramesTask.
      produceBlankFramesTask =
          () -> {
            for (long presentationTimeUs : presentationTimesUs) {
              outputListener.onOutputFrameAvailable(blankTexture, presentationTimeUs);
            }
          };
    }

    @Override
    public void setErrorListener(ErrorListener errorListener) {}

    @Override
    public void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
      // No input is queued in these tests. The BlankFrameProducer is used to produce frames.
      throw new UnsupportedOperationException();
    }

    @Override
    public void releaseOutputFrame(TextureInfo outputTexture) {}

    @Override
    public void signalEndOfCurrentInputStream() {
      // The tests don't end the input stream.
      throw new UnsupportedOperationException();
    }

    @Override
    public void release() {
      // Do nothing as destroying the OpenGL context destroys the texture.
    }
  }
}

View File

@ -476,6 +476,11 @@ public final class GlEffectsFrameProcessorPixelTest {
new SurfaceInfo(outputImageReader.getSurface(), width, height));
}
@Override
public void onOutputFrameAvailable(long presentationTimeNs) {
// Do nothing as frames are released automatically.
}
@Override
public void onFrameProcessingError(FrameProcessingException exception) {
frameProcessingException.set(exception);
@ -488,7 +493,8 @@ public final class GlEffectsFrameProcessorPixelTest {
},
effects,
DebugViewProvider.NONE,
ColorInfo.SDR_BT709_LIMITED));
ColorInfo.SDR_BT709_LIMITED,
/* releaseFramesAutomatically= */ true));
glEffectsFrameProcessor.setInputFrameInfo(
new FrameInfo(inputWidth, inputHeight, pixelWidthHeightRatio, /* streamOffsetUs= */ 0));
glEffectsFrameProcessor.registerInputFrame();

View File

@ -16,6 +16,7 @@
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import android.content.Context;
import android.opengl.EGL14;
@ -71,8 +72,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final FrameProcessor.Listener frameProcessorListener;
private final boolean sampleFromExternalTexture;
private final ColorInfo colorInfo;
private final boolean releaseFramesAutomatically;
private final float[] textureTransformMatrix;
private final Queue<Long> streamOffsetUsQueue;
private final Queue<Pair<TextureInfo, Long>> availableFrames;
private int inputWidth;
private int inputHeight;
@ -100,7 +103,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
FrameProcessor.Listener frameProcessorListener,
DebugViewProvider debugViewProvider,
boolean sampleFromExternalTexture,
ColorInfo colorInfo) {
ColorInfo colorInfo,
boolean releaseFramesAutomatically) {
this.context = context;
this.matrixTransformations = matrixTransformations;
this.eglDisplay = eglDisplay;
@ -109,11 +113,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.frameProcessorListener = frameProcessorListener;
this.sampleFromExternalTexture = sampleFromExternalTexture;
this.colorInfo = colorInfo;
this.releaseFramesAutomatically = releaseFramesAutomatically;
textureTransformMatrix = new float[16];
Matrix.setIdentityM(textureTransformMatrix, /* smOffset= */ 0);
streamOffsetUsQueue = new ConcurrentLinkedQueue<>();
inputListener = new InputListener() {};
availableFrames = new ConcurrentLinkedQueue<>();
}
@Override
@ -136,12 +142,127 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void queueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
checkState(!streamOffsetUsQueue.isEmpty(), "No input stream specified.");
long streamOffsetUs =
checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified.");
long presentationTimeNs = (presentationTimeUs + streamOffsetUs) * 1000;
frameProcessorListener.onOutputFrameAvailable(presentationTimeNs);
if (releaseFramesAutomatically) {
renderFrameToSurfaces(
inputTexture,
presentationTimeUs,
/* releaseTimeNs= */ presentationTimeNs,
/* dropLateFrame= */ false);
} else {
availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
}
inputListener.onReadyToAcceptInputFrame();
}
@Override
public void releaseOutputFrame(TextureInfo outputTexture) {
// The final texture processor writes to a surface so there is no texture to release.
throw new UnsupportedOperationException();
}
@WorkerThread
public void releaseOutputFrame(long releaseTimeNs) {
checkState(!releaseFramesAutomatically);
Pair<TextureInfo, Long> oldestAvailableFrame = availableFrames.remove();
renderFrameToSurfaces(
/* inputTexture= */ oldestAvailableFrame.first,
/* presentationTimeUs= */ oldestAvailableFrame.second,
releaseTimeNs,
/* dropLateFrame= */ true);
}
@Override
public void signalEndOfCurrentInputStream() {
checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end.");
streamOffsetUsQueue.remove();
if (streamOffsetUsQueue.isEmpty()) {
frameProcessorListener.onFrameProcessingEnded();
}
}
@Override
@WorkerThread
public void release() throws FrameProcessingException {
if (matrixTransformationProcessor != null) {
matrixTransformationProcessor.release();
}
}
@Override
public void setTextureTransformMatrix(float[] textureTransformMatrix) {
System.arraycopy(
/* src= */ textureTransformMatrix,
/* srcPos= */ 0,
/* dest= */ this.textureTransformMatrix,
/* destPost= */ 0,
/* length= */ textureTransformMatrix.length);
if (matrixTransformationProcessor != null) {
matrixTransformationProcessor.setTextureTransformMatrix(textureTransformMatrix);
}
}
/**
* Signals that there will be another input stream after all previously appended input streams
* have {@linkplain #signalEndOfCurrentInputStream() ended}.
*
* <p>This method does not need to be called on the GL thread, but the caller must ensure that
* stream offsets are appended in the correct order.
*
* @param streamOffsetUs The presentation timestamp offset, in microseconds.
*/
public void appendStream(long streamOffsetUs) {
streamOffsetUsQueue.add(streamOffsetUs);
}
/**
* Sets the output {@link SurfaceInfo}.
*
* @see FrameProcessor#setOutputSurfaceInfo(SurfaceInfo)
*/
public synchronized void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
if (!Util.areEqual(this.outputSurfaceInfo, outputSurfaceInfo)) {
if (outputSurfaceInfo != null
&& this.outputSurfaceInfo != null
&& !this.outputSurfaceInfo.surface.equals(outputSurfaceInfo.surface)) {
this.outputEglSurface = null;
}
outputSizeOrRotationChanged =
this.outputSurfaceInfo == null
|| outputSurfaceInfo == null
|| this.outputSurfaceInfo.width != outputSurfaceInfo.width
|| this.outputSurfaceInfo.height != outputSurfaceInfo.height
|| this.outputSurfaceInfo.orientationDegrees != outputSurfaceInfo.orientationDegrees;
this.outputSurfaceInfo = outputSurfaceInfo;
}
}
private void renderFrameToSurfaces(
TextureInfo inputTexture,
long presentationTimeUs,
long releaseTimeNs,
boolean dropLateFrame) {
try {
synchronized (this) {
if (!ensureConfigured(inputTexture.width, inputTexture.height)) {
maybeRenderFrameToOutputSurface(
inputTexture, presentationTimeUs, releaseTimeNs, dropLateFrame);
} catch (FrameProcessingException | GlUtil.GlException e) {
frameProcessorListener.onFrameProcessingError(
FrameProcessingException.from(e, presentationTimeUs));
}
maybeRenderFrameToDebugSurface(inputTexture, presentationTimeUs);
inputListener.onInputFrameProcessed(inputTexture);
}
private synchronized void maybeRenderFrameToOutputSurface(
TextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs, boolean dropLateFrame)
throws FrameProcessingException, GlUtil.GlException {
if (!ensureConfigured(inputTexture.width, inputTexture.height)) {
return; // Drop frames when there is no output surface.
}
@ -158,32 +279,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
outputSurfaceInfo.height);
GlUtil.clearOutputFrame();
matrixTransformationProcessor.drawFrame(inputTexture.texId, presentationTimeUs);
EGLExt.eglPresentationTimeANDROID(
eglDisplay,
outputEglSurface,
/* presentationTimeNs= */ (presentationTimeUs + streamOffsetUsQueue.element()) * 1000);
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
}
} catch (FrameProcessingException | GlUtil.GlException e) {
frameProcessorListener.onFrameProcessingError(
FrameProcessingException.from(e, presentationTimeUs));
}
if (debugSurfaceViewWrapper != null && matrixTransformationProcessor != null) {
MatrixTransformationProcessor matrixTransformationProcessor =
this.matrixTransformationProcessor;
try {
debugSurfaceViewWrapper.maybeRenderToSurfaceView(
() -> {
GlUtil.clearOutputFrame();
matrixTransformationProcessor.drawFrame(inputTexture.texId, presentationTimeUs);
});
} catch (FrameProcessingException | GlUtil.GlException e) {
Log.d(TAG, "Error rendering to debug preview", e);
if (dropLateFrame && System.nanoTime() > releaseTimeNs) {
return;
}
}
inputListener.onInputFrameProcessed(inputTexture);
inputListener.onReadyToAcceptInputFrame();
EGLExt.eglPresentationTimeANDROID(eglDisplay, outputEglSurface, releaseTimeNs);
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
}
@EnsuresNonNullIf(
@ -282,76 +383,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return matrixTransformationProcessor;
}
@Override
public void releaseOutputFrame(TextureInfo outputTexture) {
// The final texture processor writes to a surface so there is no texture to release.
throw new UnsupportedOperationException();
private void maybeRenderFrameToDebugSurface(TextureInfo inputTexture, long presentationTimeUs) {
if (debugSurfaceViewWrapper == null || matrixTransformationProcessor == null) {
return;
}
@Override
public void signalEndOfCurrentInputStream() {
checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end.");
streamOffsetUsQueue.remove();
if (streamOffsetUsQueue.isEmpty()) {
frameProcessorListener.onFrameProcessingEnded();
}
}
@Override
@WorkerThread
public void release() throws FrameProcessingException {
if (matrixTransformationProcessor != null) {
matrixTransformationProcessor.release();
}
}
@Override
public void setTextureTransformMatrix(float[] textureTransformMatrix) {
System.arraycopy(
/* src= */ textureTransformMatrix,
/* srcPos= */ 0,
/* dest= */ this.textureTransformMatrix,
/* destPost= */ 0,
/* length= */ textureTransformMatrix.length);
if (matrixTransformationProcessor != null) {
matrixTransformationProcessor.setTextureTransformMatrix(textureTransformMatrix);
}
}
/**
* Signals that there will be another input stream after all previously appended input streams
* have {@linkplain #signalEndOfCurrentInputStream() ended}.
*
* <p>This method does not need to be called on the GL thread, but the caller must ensure that
* stream offsets are appended in the correct order.
*
* @param streamOffsetUs The presentation timestamp offset, in microseconds.
*/
public void appendStream(long streamOffsetUs) {
streamOffsetUsQueue.add(streamOffsetUs);
}
/**
* Sets the output {@link SurfaceInfo}.
*
* @see FrameProcessor#setOutputSurfaceInfo(SurfaceInfo)
*/
public synchronized void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
if (!Util.areEqual(this.outputSurfaceInfo, outputSurfaceInfo)) {
if (outputSurfaceInfo != null
&& this.outputSurfaceInfo != null
&& !this.outputSurfaceInfo.surface.equals(outputSurfaceInfo.surface)) {
this.outputEglSurface = null;
}
outputSizeOrRotationChanged =
this.outputSurfaceInfo == null
|| outputSurfaceInfo == null
|| this.outputSurfaceInfo.width != outputSurfaceInfo.width
|| this.outputSurfaceInfo.height != outputSurfaceInfo.height
|| this.outputSurfaceInfo.orientationDegrees != outputSurfaceInfo.orientationDegrees;
this.outputSurfaceInfo = outputSurfaceInfo;
MatrixTransformationProcessor matrixTransformationProcessor =
this.matrixTransformationProcessor;
try {
debugSurfaceViewWrapper.maybeRenderToSurfaceView(
() -> {
GlUtil.clearOutputFrame();
matrixTransformationProcessor.drawFrame(inputTexture.texId, presentationTimeUs);
});
} catch (FrameProcessingException | GlUtil.GlException e) {
Log.d(TAG, "Error rendering to debug preview", e);
}
}

View File

@ -67,7 +67,8 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
FrameProcessor.Listener listener,
List<Effect> effects,
DebugViewProvider debugViewProvider,
ColorInfo colorInfo)
ColorInfo colorInfo,
boolean releaseFramesAutomatically)
throws FrameProcessingException {
ExecutorService singleThreadExecutorService = Util.newSingleThreadExecutor(THREAD_NAME);
@ -81,6 +82,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
effects,
debugViewProvider,
colorInfo,
releaseFramesAutomatically,
singleThreadExecutorService));
try {
@ -111,6 +113,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
List<Effect> effects,
DebugViewProvider debugViewProvider,
ColorInfo colorInfo,
boolean releaseFramesAutomatically,
ExecutorService singleThreadExecutorService)
throws GlUtil.GlException, FrameProcessingException {
checkState(Thread.currentThread().getName().equals(THREAD_NAME));
@ -135,13 +138,24 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
ImmutableList<GlTextureProcessor> textureProcessors =
getGlTextureProcessorsForGlEffects(
context, effects, eglDisplay, eglContext, listener, debugViewProvider, colorInfo);
context,
effects,
eglDisplay,
eglContext,
listener,
debugViewProvider,
colorInfo,
releaseFramesAutomatically);
FrameProcessingTaskExecutor frameProcessingTaskExecutor =
new FrameProcessingTaskExecutor(singleThreadExecutorService, listener);
chainTextureProcessorsWithListeners(textureProcessors, frameProcessingTaskExecutor, listener);
return new GlEffectsFrameProcessor(
eglDisplay, eglContext, frameProcessingTaskExecutor, textureProcessors);
eglDisplay,
eglContext,
frameProcessingTaskExecutor,
textureProcessors,
releaseFramesAutomatically);
}
/**
@ -163,7 +177,8 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
EGLContext eglContext,
FrameProcessor.Listener listener,
DebugViewProvider debugViewProvider,
ColorInfo colorInfo)
ColorInfo colorInfo,
boolean releaseFramesAutomatically)
throws FrameProcessingException {
ImmutableList.Builder<GlTextureProcessor> textureProcessorListBuilder =
new ImmutableList.Builder<>();
@ -244,7 +259,8 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
listener,
debugViewProvider,
sampleFromExternalTexture,
colorInfo));
colorInfo,
releaseFramesAutomatically));
return textureProcessorListBuilder.build();
}
@ -278,6 +294,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final ExternalTextureManager inputExternalTextureManager;
private final Surface inputSurface;
private final boolean releaseFramesAutomatically;
private final FinalMatrixTransformationProcessorWrapper finalTextureProcessorWrapper;
private final ImmutableList<GlTextureProcessor> allTextureProcessors;
@ -293,12 +310,14 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
EGLDisplay eglDisplay,
EGLContext eglContext,
FrameProcessingTaskExecutor frameProcessingTaskExecutor,
ImmutableList<GlTextureProcessor> textureProcessors)
ImmutableList<GlTextureProcessor> textureProcessors,
boolean releaseFramesAutomatically)
throws FrameProcessingException {
this.eglDisplay = eglDisplay;
this.eglContext = eglContext;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
this.releaseFramesAutomatically = releaseFramesAutomatically;
checkState(!textureProcessors.isEmpty());
checkState(textureProcessors.get(0) instanceof ExternalTextureProcessor);
@ -349,6 +368,15 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
finalTextureProcessorWrapper.setOutputSurfaceInfo(outputSurfaceInfo);
}
@Override
public void releaseOutputFrame(long releaseTimeNs) {
checkState(
!releaseFramesAutomatically,
"Calling this method is not allowed when releaseFramesAutomatically is enabled");
frameProcessingTaskExecutor.submitWithHighPriority(
() -> finalTextureProcessorWrapper.releaseOutputFrame(releaseTimeNs));
}
@Override
public void signalEndOfInput() {
checkState(!inputStreamEnded);

View File

@ -125,6 +125,11 @@ import org.checkerframework.dataflow.qual.Pure;
}
}
@Override
public void onOutputFrameAvailable(long presentationTimeNs) {
// Do nothing as frames are released automatically.
}
@Override
public void onFrameProcessingError(FrameProcessingException exception) {
asyncErrorListener.onTransformationException(
@ -147,7 +152,8 @@ import org.checkerframework.dataflow.qual.Pure;
// This implies that the OpenGL EXT_YUV_target extension is supported and hence the
// default FrameProcessor, GlEffectsFrameProcessor, also supports HDR. Otherwise, tone
// mapping is applied, which ensures the decoder outputs SDR output for an HDR input.
encoderWrapper.getSupportedInputColor());
encoderWrapper.getSupportedInputColor(),
/* releaseFramesAutomatically= */ true);
} catch (FrameProcessingException e) {
throw TransformationException.createForFrameProcessingException(
e, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED);