effect: Rename VFP frame release to render.

renderOutputFrame actually renders frames to an output surface. We'll soon have a
releaseOutputFrame method that releases resources associated with an output time, so
rename this method now to disambiguate the two.

Also rename onOutputFrameAvailable to onOutputFrameAvailableForRendering, to make it
clear that the frame is available for rendering rather than for "release".

This should be a renaming-only change, with no functional differences.

PiperOrigin-RevId: 527844947
Authored by huangdarwin on 2023-04-28 12:13:08 +01:00, committed by Marc Baechinger
parent 3a3322dbc5
commit 24343f55af
8 changed files with 157 additions and 152 deletions
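
For orientation (not part of this commit), here is a minimal sketch of the caller-side contract after the rename, assuming a VideoFrameProcessor created with renderFramesAutomatically=false. The androidx.media3.common import path, the class name, and the isLate parameter are illustrative assumptions rather than anything defined by this change.

// Import path assumed; adjust to the module that hosts VideoFrameProcessor.
import androidx.media3.common.VideoFrameProcessor;

/** Illustrative caller of the renamed rendering API (a sketch, not part of the commit). */
final class ControlledRenderingExample {

  private ControlledRenderingExample() {}

  /**
   * Reacts to Listener.onOutputFrameAvailableForRendering for a processor created with
   * renderFramesAutomatically=false, where the caller must render or drop each frame itself.
   *
   * @param videoFrameProcessor The processor whose oldest frame became available for rendering.
   * @param isLate Whether the frame missed its deadline (how this is decided is up to the caller).
   */
  static void handleFrameAvailableForRendering(
      VideoFrameProcessor videoFrameProcessor, boolean isLate) {
    if (isLate) {
      // Skip the frame; it is dropped instead of being rendered to the output surface.
      videoFrameProcessor.renderOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
    } else {
      // Render the frame to the output surface as soon as possible. An explicit render time in
      // nanoseconds (for example a target System.nanoTime() value) could be passed instead.
      videoFrameProcessor.renderOutputFrame(VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY);
    }
  }
}

With renderFramesAutomatically=true none of this applies: frames are rendered automatically and renderOutputFrame must not be called.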


@ -79,10 +79,10 @@ public interface VideoFrameProcessor {
* @param inputColorInfo The {@link ColorInfo} for input frames.
* @param outputColorInfo The {@link ColorInfo} for output frames.
* @param inputType The {@link InputType}.
* @param releaseFramesAutomatically If {@code true}, the instance will render output frames to
* @param renderFramesAutomatically If {@code true}, the instance will render output frames to
* the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as
* {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link
* VideoFrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to
* VideoFrameProcessor} will block until {@link #renderOutputFrame(long)} is called, to
* render or drop the frame.
* @param executor The {@link Executor} on which the {@code listener} is invoked.
* @param listener A {@link Listener}.
@ -97,7 +97,7 @@ public interface VideoFrameProcessor {
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
@InputType int inputType,
boolean releaseFramesAutomatically,
boolean renderFramesAutomatically,
Executor executor,
Listener listener)
throws VideoFrameProcessingException;
@ -128,7 +128,7 @@ public interface VideoFrameProcessor {
*
* @param presentationTimeUs The presentation time of the frame, in microseconds.
*/
void onOutputFrameAvailable(long presentationTimeUs);
void onOutputFrameAvailableForRendering(long presentationTimeUs);
/**
* Called when an exception occurs during asynchronous video frame processing.
@ -143,12 +143,12 @@ public interface VideoFrameProcessor {
}
/**
* Indicates the frame should be released immediately after {@link #releaseOutputFrame(long)} is
* Indicates the frame should be rendered immediately after {@link #renderOutputFrame(long)} is
* invoked.
*/
long RELEASE_OUTPUT_FRAME_IMMEDIATELY = -1;
long RENDER_OUTPUT_FRAME_IMMEDIATELY = -1;
/** Indicates the frame should be dropped after {@link #releaseOutputFrame(long)} is invoked. */
/** Indicates the frame should be dropped after {@link #renderOutputFrame(long)} is invoked. */
long DROP_OUTPUT_FRAME = -2;
/**
@ -226,7 +226,7 @@ public interface VideoFrameProcessor {
int getPendingInputFrameCount();
/**
* Sets the output surface and supporting information. When output frames are released and not
* Sets the output surface and supporting information. When output frames are rendered and not
* dropped, they will be rendered to this output {@link SurfaceInfo}.
*
* <p>The new output {@link SurfaceInfo} is applied from the next output frame rendered onwards.
@ -244,24 +244,25 @@ public interface VideoFrameProcessor {
void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo);
/**
* Releases the oldest unreleased output frame that has become {@linkplain
* Listener#onOutputFrameAvailable(long) available} at the given {@code releaseTimeNs}.
* Renders the oldest unrendered output frame that has become {@linkplain
* Listener#onOutputFrameAvailableForRendering(long) available for rendering} at the given {@code
* renderTimeNs}.
*
* <p>This will either render the output frame to the {@linkplain #setOutputSurfaceInfo output
* surface}, or drop the frame, per {@code releaseTimeNs}.
* surface}, or drop the frame, per {@code renderTimeNs}.
*
* <p>This method must only be called if {@code releaseFramesAutomatically} was set to {@code
* <p>This method must only be called if {@code renderFramesAutomatically} was set to {@code
* false} using the {@link Factory} and should be called exactly once for each frame that becomes
* {@linkplain Listener#onOutputFrameAvailable(long) available}.
* {@linkplain Listener#onOutputFrameAvailableForRendering(long) available for rendering}.
*
* <p>The {@code releaseTimeNs} may be passed to {@link EGLExt#eglPresentationTimeANDROID}
* <p>The {@code renderTimeNs} may be passed to {@link EGLExt#eglPresentationTimeANDROID}
* depending on the implementation.
*
* @param releaseTimeNs The release time to use for the frame, in nanoseconds. The release time
* can be before of after the current system time. Use {@link #DROP_OUTPUT_FRAME} to drop the
* frame, or {@link #RELEASE_OUTPUT_FRAME_IMMEDIATELY} to release the frame immediately.
* @param renderTimeNs The render time to use for the frame, in nanoseconds. The render time can
* be before or after the current system time. Use {@link #DROP_OUTPUT_FRAME} to drop the
* frame, or {@link #RENDER_OUTPUT_FRAME_IMMEDIATELY} to render the frame immediately.
*/
void releaseOutputFrame(long releaseTimeNs);
void renderOutputFrame(long renderTimeNs);
/**
* Informs the {@code VideoFrameProcessor} that no further input frames should be accepted.
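
To complement the renderOutputFrame Javadoc above, here is a sketch of one way to compute an explicit renderTimeNs. The class name, the streamStartRealtimeNs parameter, and the 50 ms lateness threshold are assumptions of this sketch, not part of media3 or of this commit.

// Import path assumed; adjust to the module that hosts VideoFrameProcessor.
import androidx.media3.common.VideoFrameProcessor;

/** Illustrative render-time policy for controlled rendering (a sketch, not part of the commit). */
final class RenderTimePolicy {

  /** Assumed lateness threshold, in nanoseconds, beyond which a frame is dropped. */
  private static final long MAX_LATENESS_NS = 50_000_000;

  private RenderTimePolicy() {}

  /**
   * Maps a frame's presentation time to a value to pass to renderOutputFrame.
   *
   * @param presentationTimeUs The frame's presentation time, in microseconds.
   * @param streamStartRealtimeNs The System.nanoTime() at which presentation time zero should
   *     appear on screen (an assumption of this sketch).
   */
  static long chooseRenderTimeNs(long presentationTimeUs, long streamStartRealtimeNs) {
    long targetRenderTimeNs = streamStartRealtimeNs + presentationTimeUs * 1000;
    if (targetRenderTimeNs < System.nanoTime() - MAX_LATENESS_NS) {
      // Far too late: drop the frame rather than render a stale image.
      return VideoFrameProcessor.DROP_OUTPUT_FRAME;
    }
    // A render time slightly in the past or future is fine; per the Javadoc above, it may be
    // forwarded to EGLExt.eglPresentationTimeANDROID by the implementation.
    return targetRenderTimeNs;
  }
}

The returned value would then be passed straight to renderOutputFrame from the listener's onOutputFrameAvailableForRendering callback.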


@ -114,7 +114,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
Queue<Long> actualPresentationTimesUs = new ConcurrentLinkedQueue<>();
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOnOutputFrameAvailableListener(actualPresentationTimesUs::add)
.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
.build();
long offsetUs = 1_000_000L;
@ -137,7 +137,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
Queue<Long> actualPresentationTimesUs = new ConcurrentLinkedQueue<>();
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOnOutputFrameAvailableListener(actualPresentationTimesUs::add)
.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
.build();
long offsetUs1 = 1_000_000L;
@ -172,7 +172,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
Queue<Long> actualPresentationTimesUs = new ConcurrentLinkedQueue<>();
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOnOutputFrameAvailableListener(actualPresentationTimesUs::add)
.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
.build();
videoFrameProcessorTestRunner.queueInputBitmap(
@ -197,7 +197,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
.setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory.Builder().build())
.setInputType(INPUT_TYPE_BITMAP)
.setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
.setOnOutputFrameAvailableListener(
.setOnOutputFrameAvailableForRenderingListener(
unused -> checkNotNull(framesProduced).incrementAndGet());
}
}


@ -54,23 +54,23 @@ import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Tests for frame release in {@link DefaultVideoFrameProcessor}. */
/** Tests for frame rendering in {@link DefaultVideoFrameProcessor}. */
@RunWith(AndroidJUnit4.class)
public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
private static final int WIDTH = 200;
private static final int HEIGHT = 100;
/**
* Time to wait between releasing frames to avoid frame drops between GL and the {@link
* Time to wait between rendering frames to avoid frame drops between GL and the {@link
* ImageReader}.
*/
private static final long PER_FRAME_RELEASE_WAIT_TIME_MS = 1000L;
/** Maximum time to wait for each released frame to be notified. */
private static final long PER_FRAME_RENDERING_WAIT_TIME_MS = 1000L;
/** Maximum time to wait for each rendered frame to be notified. */
private static final long PER_FRAME_TIMEOUT_MS = 5000L;
private static final long MICROS_TO_NANOS = 1000L;
private final LinkedBlockingQueue<Long> outputReleaseTimesNs = new LinkedBlockingQueue<>();
private final LinkedBlockingQueue<Long> outputRenderTimesNs = new LinkedBlockingQueue<>();
private @MonotonicNonNull DefaultVideoFrameProcessor defaultVideoFrameProcessor;
@ -82,22 +82,22 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
}
@Test
public void automaticFrameRelease_withOneFrame_reusesInputTimestamp() throws Exception {
public void automaticFrameRendering_withOneFrame_reusesInputTimestamp() throws Exception {
long originalPresentationTimeUs = 1234;
AtomicLong actualPresentationTimeUs = new AtomicLong();
processFramesToEndOfStream(
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ actualPresentationTimeUs::set,
/* releaseFramesAutomatically= */ true);
/* renderFramesAutomatically= */ true);
assertThat(actualPresentationTimeUs.get()).isEqualTo(originalPresentationTimeUs);
ImmutableList<Long> actualReleaseTimesNs =
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualReleaseTimesNs).containsExactly(MICROS_TO_NANOS * originalPresentationTimeUs);
ImmutableList<Long> actualRenderTimesNs =
waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualRenderTimesNs).containsExactly(MICROS_TO_NANOS * originalPresentationTimeUs);
}
@Test
public void automaticFrameRelease_withThreeFrames_reusesInputTimestamps() throws Exception {
public void automaticFrameRendering_withThreeFrames_reusesInputTimestamps() throws Exception {
long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
ArrayList<Long> actualPresentationTimesUs = new ArrayList<>();
processFramesToEndOfStream(
@ -108,12 +108,12 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
// TODO(b/264252759): Investigate output frames being dropped and remove sleep.
// Frames can be dropped silently between EGL and the ImageReader. Sleep after each call
// to swap buffers, to avoid this behavior.
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
},
/* releaseFramesAutomatically= */ true);
/* renderFramesAutomatically= */ true);
assertThat(actualPresentationTimesUs)
.containsExactly(
@ -121,9 +121,9 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
originalPresentationTimesUs[1],
originalPresentationTimesUs[2])
.inOrder();
ImmutableList<Long> actualReleaseTimesNs =
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 3);
assertThat(actualReleaseTimesNs)
ImmutableList<Long> actualRenderTimesNs =
waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 3);
assertThat(actualRenderTimesNs)
.containsExactly(
MICROS_TO_NANOS * originalPresentationTimesUs[0],
MICROS_TO_NANOS * originalPresentationTimesUs[1],
@ -132,67 +132,66 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
}
@Test
public void controlledFrameRelease_withOneFrame_usesGivenTimestamp() throws Exception {
public void controlledFrameRendering_withOneFrame_usesGivenTimestamp() throws Exception {
long originalPresentationTimeUs = 1234;
long releaseTimesNs = System.nanoTime() + 345678;
long renderTimesNs = System.nanoTime() + 345678;
AtomicLong actualPresentationTimeUs = new AtomicLong();
processFramesToEndOfStream(
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimeUs.set(presentationTimeUs);
checkNotNull(defaultVideoFrameProcessor).releaseOutputFrame(releaseTimesNs);
checkNotNull(defaultVideoFrameProcessor).renderOutputFrame(renderTimesNs);
},
/* releaseFramesAutomatically= */ false);
/* renderFramesAutomatically= */ false);
ImmutableList<Long> actualReleaseTimesNs =
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualReleaseTimesNs).containsExactly(releaseTimesNs);
ImmutableList<Long> actualRenderTimesNs =
waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualRenderTimesNs).containsExactly(renderTimesNs);
}
@Test
public void controlledFrameRelease_withOneFrameRequestImmediateRelease_releasesFrame()
public void controlledFrameRendering_withOneFrameRequestImmediateRender_rendersframe()
throws Exception {
long originalPresentationTimeUs = 1234;
long releaseTimesNs = VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY;
long renderTimesNs = VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY;
AtomicLong actualPresentationTimeUs = new AtomicLong();
processFramesToEndOfStream(
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimeUs.set(presentationTimeUs);
checkNotNull(defaultVideoFrameProcessor).releaseOutputFrame(releaseTimesNs);
checkNotNull(defaultVideoFrameProcessor).renderOutputFrame(renderTimesNs);
},
/* releaseFramesAutomatically= */ false);
/* renderFramesAutomatically= */ false);
assertThat(actualPresentationTimeUs.get()).isEqualTo(originalPresentationTimeUs);
// The actual release time is determined by the VideoFrameProcessor when releasing the frame.
ImmutableList<Long> actualReleaseTimesNs =
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualReleaseTimesNs).hasSize(1);
// The actual render time is determined by the VideoFrameProcessor when rendering the frame.
ImmutableList<Long> actualRenderTimesNs =
waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualRenderTimesNs).hasSize(1);
}
@Test
public void controlledFrameRelease_withLateFrame_releasesFrame() throws Exception {
public void controlledFrameRendering_withLateFrame_rendersframe() throws Exception {
long originalPresentationTimeUs = 1234;
long releaseTimeBeforeCurrentTimeNs = System.nanoTime() - 345678;
long renderTimeBeforeCurrentTimeNs = System.nanoTime() - 345678;
AtomicLong actualPresentationTimeUs = new AtomicLong();
processFramesToEndOfStream(
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimeUs.set(presentationTimeUs);
checkNotNull(defaultVideoFrameProcessor)
.releaseOutputFrame(releaseTimeBeforeCurrentTimeNs);
checkNotNull(defaultVideoFrameProcessor).renderOutputFrame(renderTimeBeforeCurrentTimeNs);
},
/* releaseFramesAutomatically= */ false);
/* renderFramesAutomatically= */ false);
ImmutableList<Long> actualReleaseTimesNs =
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualReleaseTimesNs).hasSize(1);
// The actual release time is determined by the VideoFrameProcessor when releasing the frame.
assertThat(actualReleaseTimesNs.get(0)).isAtLeast(releaseTimeBeforeCurrentTimeNs);
ImmutableList<Long> actualRenderTimesNs =
waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualRenderTimesNs).hasSize(1);
// The actual render time is determined by the VideoFrameProcessor when rendering the frame.
assertThat(actualRenderTimesNs.get(0)).isAtLeast(renderTimeBeforeCurrentTimeNs);
}
@Test
public void controlledFrameRelease_requestsFrameDropping_dropsFrame() throws Exception {
public void controlledFrameRendering_requestsFrameDropping_dropsFrame() throws Exception {
long originalPresentationTimeUs = 1234;
AtomicLong actualPresentationTimeUs = new AtomicLong();
processFramesToEndOfStream(
@ -200,19 +199,19 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
/* onFrameAvailableListener= */ presentationTimeNs -> {
actualPresentationTimeUs.set(presentationTimeNs);
checkNotNull(defaultVideoFrameProcessor)
.releaseOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
.renderOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
},
/* releaseFramesAutomatically= */ false);
/* renderFramesAutomatically= */ false);
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 0);
waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 0);
}
@Test
public void controlledFrameRelease_withThreeIndividualFrames_usesGivenTimestamps()
public void controlledFrameRendering_withThreeIndividualFrames_usesGivenTimestamps()
throws Exception {
long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
long offsetNs = System.nanoTime();
long[] releaseTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
long[] renderTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
ArrayList<Long> actualPresentationTimesUs = new ArrayList<>();
AtomicInteger frameIndex = new AtomicInteger();
processFramesToEndOfStream(
@ -220,17 +219,17 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
/* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimesUs.add(presentationTimeUs);
checkNotNull(defaultVideoFrameProcessor)
.releaseOutputFrame(releaseTimesNs[frameIndex.getAndIncrement()]);
.renderOutputFrame(renderTimesNs[frameIndex.getAndIncrement()]);
try {
// TODO(b/264252759): Investigate output frames being dropped and remove sleep.
// Frames can be dropped silently between EGL and the ImageReader. Sleep after each call
// to swap buffers, to avoid this behavior.
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
},
/* releaseFramesAutomatically= */ false);
/* renderFramesAutomatically= */ false);
assertThat(actualPresentationTimesUs)
.containsExactly(
@ -240,31 +239,32 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
.inOrder();
int actualFrameCount = frameIndex.get();
assertThat(actualFrameCount).isEqualTo(originalPresentationTimesUs.length);
long[] actualReleaseTimesNs =
Longs.toArray(waitForFrameReleaseAndGetReleaseTimesNs(actualFrameCount));
assertThat(actualReleaseTimesNs).isEqualTo(releaseTimesNs);
long[] actualRenderTimesNs =
Longs.toArray(waitForFrameRenderingAndGetRenderTimesNs(actualFrameCount));
assertThat(actualRenderTimesNs).isEqualTo(renderTimesNs);
}
@Test
public void controlledFrameRelease_withThreeFramesAtOnce_usesGivenTimestamps() throws Exception {
public void controlledFrameRendering_withThreeFramesAtOnce_usesGivenTimestamps()
throws Exception {
long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
long offsetNs = System.nanoTime();
long[] releaseTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
long[] renderTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
ArrayList<Long> actualPresentationTimesUs = new ArrayList<>();
processFramesToEndOfStream(
/* inputPresentationTimesUs= */ originalPresentationTimesUs,
/* onFrameAvailableListener= */ actualPresentationTimesUs::add,
/* releaseFramesAutomatically= */ false);
/* renderFramesAutomatically= */ false);
// TODO(b/264252759): Investigate output frames being dropped and remove sleep.
// Frames can be dropped silently between EGL and the ImageReader. Sleep after each call
// to swap buffers, to avoid this behavior.
defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[0]);
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[1]);
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[2]);
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
defaultVideoFrameProcessor.renderOutputFrame(renderTimesNs[0]);
Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
defaultVideoFrameProcessor.renderOutputFrame(renderTimesNs[1]);
Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
defaultVideoFrameProcessor.renderOutputFrame(renderTimesNs[2]);
Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
assertThat(actualPresentationTimesUs)
.containsExactly(
@ -272,20 +272,20 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
originalPresentationTimesUs[1],
originalPresentationTimesUs[2])
.inOrder();
long[] actualReleaseTimesNs =
Longs.toArray(waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 3));
assertThat(actualReleaseTimesNs).isEqualTo(releaseTimesNs);
long[] actualRenderTimesNs =
Longs.toArray(waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 3));
assertThat(actualRenderTimesNs).isEqualTo(renderTimesNs);
}
private interface OnOutputFrameAvailableListener {
void onFrameAvailable(long presentationTimeUs);
private interface OnOutputFrameAvailableForRenderingListener {
void onFrameAvailableForRendering(long presentationTimeUs);
}
@EnsuresNonNull("defaultVideoFrameProcessor")
private void processFramesToEndOfStream(
long[] inputPresentationTimesUs,
OnOutputFrameAvailableListener onFrameAvailableListener,
boolean releaseFramesAutomatically)
OnOutputFrameAvailableForRenderingListener onFrameAvailableListener,
boolean renderFramesAutomatically)
throws Exception {
AtomicReference<@NullableType VideoFrameProcessingException>
videoFrameProcessingExceptionReference = new AtomicReference<>();
@ -302,7 +302,7 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
/* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
/* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
INPUT_TYPE_SURFACE,
releaseFramesAutomatically,
renderFramesAutomatically,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
@Override
@ -319,15 +319,15 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
outputImageReader.setOnImageAvailableListener(
imageReader -> {
try (Image image = imageReader.acquireNextImage()) {
outputReleaseTimesNs.add(image.getTimestamp());
outputRenderTimesNs.add(image.getTimestamp());
}
},
Util.createHandlerForCurrentOrMainLooper());
}
@Override
public void onOutputFrameAvailable(long presentationTimeUs) {
onFrameAvailableListener.onFrameAvailable(presentationTimeUs);
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
onFrameAvailableListener.onFrameAvailableForRendering(presentationTimeUs);
}
@Override
@ -364,15 +364,15 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
}
}
private ImmutableList<Long> waitForFrameReleaseAndGetReleaseTimesNs(int expectedFrameCount)
private ImmutableList<Long> waitForFrameRenderingAndGetRenderTimesNs(int expectedFrameCount)
throws Exception {
ImmutableList.Builder<Long> listBuilder = new ImmutableList.Builder<>();
for (int i = 0; i < expectedFrameCount; i++) {
listBuilder.add(checkNotNull(outputReleaseTimesNs.poll(PER_FRAME_TIMEOUT_MS, MILLISECONDS)));
listBuilder.add(checkNotNull(outputRenderTimesNs.poll(PER_FRAME_TIMEOUT_MS, MILLISECONDS)));
}
// This is a best-effort check because there's no guarantee that frames aren't added to the
// release times after this method has been called.
assertThat(outputReleaseTimesNs).isEmpty();
// render times after this method has been called.
assertThat(outputRenderTimesNs).isEmpty();
return listBuilder.build();
}


@ -188,7 +188,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
@InputType int inputType,
boolean releaseFramesAutomatically,
boolean renderFramesAutomatically,
Executor listenerExecutor,
Listener listener)
throws VideoFrameProcessingException {
@ -227,7 +227,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
outputColorInfo,
enableColorTransfers,
inputType,
releaseFramesAutomatically,
renderFramesAutomatically,
singleThreadExecutorService,
listenerExecutor,
listener,
@ -253,7 +253,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private final EGLContext eglContext;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
private final InputHandler inputHandler;
private final boolean releaseFramesAutomatically;
private final boolean renderFramesAutomatically;
private final FinalShaderProgramWrapper finalShaderProgramWrapper;
private final ImmutableList<GlShaderProgram> allShaderPrograms;
// A queue of input streams that have not been fully processed identified by their input types.
@ -271,13 +271,13 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@InputType int inputType,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
ImmutableList<GlShaderProgram> shaderPrograms,
boolean releaseFramesAutomatically)
boolean renderFramesAutomatically)
throws VideoFrameProcessingException {
this.eglDisplay = eglDisplay;
this.eglContext = eglContext;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
this.releaseFramesAutomatically = releaseFramesAutomatically;
this.renderFramesAutomatically = renderFramesAutomatically;
this.unprocessedInputStreams = new ConcurrentLinkedQueue<>();
checkState(!shaderPrograms.isEmpty());
@ -411,12 +411,12 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
}
@Override
public void releaseOutputFrame(long releaseTimeNs) {
public void renderOutputFrame(long renderTimeNs) {
checkState(
!releaseFramesAutomatically,
"Calling this method is not allowed when releaseFramesAutomatically is enabled");
!renderFramesAutomatically,
"Calling this method is not allowed when renderFramesAutomatically is enabled");
videoFrameProcessingTaskExecutor.submitWithHighPriority(
() -> finalShaderProgramWrapper.releaseOutputFrame(releaseTimeNs));
() -> finalShaderProgramWrapper.renderOutputFrame(renderTimeNs));
}
@Override
@ -494,7 +494,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
ColorInfo outputColorInfo,
boolean enableColorTransfers,
@InputType int inputType,
boolean releaseFramesAutomatically,
boolean renderFramesAutomatically,
ExecutorService singleThreadExecutorService,
Executor executor,
Listener listener,
@ -514,9 +514,9 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
glObjectsProvider.createEglContext(eglDisplay, openGlVersion, configAttributes);
glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay, configAttributes);
// Not releaseFramesAutomatically means outputting to a display surface. HDR display surfaces
// Not renderFramesAutomatically means outputting to a display surface. HDR display surfaces
// require the BT2020 PQ GL extension.
if (!releaseFramesAutomatically && ColorInfo.isTransferHdr(outputColorInfo)) {
if (!renderFramesAutomatically && ColorInfo.isTransferHdr(outputColorInfo)) {
// Display hardware supports PQ only.
checkArgument(outputColorInfo.colorTransfer == C.COLOR_TRANSFER_ST2084);
if (Util.SDK_INT < 33 || !GlUtil.isBt2020PqExtensionSupported()) {
@ -538,7 +538,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
outputColorInfo,
enableColorTransfers,
inputType,
releaseFramesAutomatically,
renderFramesAutomatically,
executor,
listener,
glObjectsProvider,
@ -555,7 +555,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
inputType,
videoFrameProcessingTaskExecutor,
shaderPrograms,
releaseFramesAutomatically);
renderFramesAutomatically);
}
/**
@ -579,7 +579,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
ColorInfo outputColorInfo,
boolean enableColorTransfers,
@InputType int inputType,
boolean releaseFramesAutomatically,
boolean renderFramesAutomatically,
Executor executor,
Listener listener,
GlObjectsProvider glObjectsProvider,
@ -666,7 +666,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
enableColorTransfers,
sampleFromInputTexture,
inputType,
releaseFramesAutomatically,
renderFramesAutomatically,
executor,
listener,
glObjectsProvider,


@ -87,7 +87,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final ColorInfo inputColorInfo;
private final ColorInfo outputColorInfo;
private final boolean enableColorTransfers;
private final boolean releaseFramesAutomatically;
private final boolean renderFramesAutomatically;
private final Executor videoFrameProcessorListenerExecutor;
private final VideoFrameProcessor.Listener videoFrameProcessorListener;
private final float[] textureTransformMatrix;
@ -129,7 +129,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
boolean enableColorTransfers,
boolean sampleFromInputTexture,
@VideoFrameProcessor.InputType int inputType,
boolean releaseFramesAutomatically,
boolean renderFramesAutomatically,
Executor videoFrameProcessorListenerExecutor,
VideoFrameProcessor.Listener videoFrameProcessorListener,
GlObjectsProvider glObjectsProvider,
@ -145,7 +145,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.inputColorInfo = inputColorInfo;
this.outputColorInfo = outputColorInfo;
this.enableColorTransfers = enableColorTransfers;
this.releaseFramesAutomatically = releaseFramesAutomatically;
this.renderFramesAutomatically = renderFramesAutomatically;
this.videoFrameProcessorListenerExecutor = videoFrameProcessorListenerExecutor;
this.videoFrameProcessorListener = videoFrameProcessorListener;
this.glObjectsProvider = glObjectsProvider;
@ -203,9 +203,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
frameProcessingStarted = true;
videoFrameProcessorListenerExecutor.execute(
() -> videoFrameProcessorListener.onOutputFrameAvailable(presentationTimeUs));
if (releaseFramesAutomatically) {
renderFrame(inputTexture, presentationTimeUs, /* releaseTimeNs= */ presentationTimeUs * 1000);
() -> videoFrameProcessorListener.onOutputFrameAvailableForRendering(presentationTimeUs));
if (renderFramesAutomatically) {
renderFrame(inputTexture, presentationTimeUs, /* renderTimeNs= */ presentationTimeUs * 1000);
} else {
availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
}
@ -218,20 +218,20 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
throw new UnsupportedOperationException();
}
public void releaseOutputFrame(long releaseTimeNs) {
public void renderOutputFrame(long renderTimeNs) {
frameProcessingStarted = true;
checkState(!releaseFramesAutomatically);
checkState(!renderFramesAutomatically);
Pair<GlTextureInfo, Long> oldestAvailableFrame = availableFrames.remove();
renderFrame(
/* inputTexture= */ oldestAvailableFrame.first,
/* presentationTimeUs= */ oldestAvailableFrame.second,
releaseTimeNs);
renderTimeNs);
}
@Override
public void flush() {
frameProcessingStarted = true;
// Drops all frames that aren't released yet.
// Drops all frames that aren't rendered yet.
availableFrames.clear();
if (defaultShaderProgram != null) {
defaultShaderProgram.flush();
@ -302,15 +302,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private synchronized void renderFrame(
GlTextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) {
GlTextureInfo inputTexture, long presentationTimeUs, long renderTimeNs) {
try {
if (releaseTimeNs == VideoFrameProcessor.DROP_OUTPUT_FRAME
if (renderTimeNs == VideoFrameProcessor.DROP_OUTPUT_FRAME
|| !ensureConfigured(inputTexture.width, inputTexture.height)) {
inputListener.onInputFrameProcessed(inputTexture);
return; // Drop frames when requested, or there is no output surface.
}
if (outputSurfaceInfo != null) {
renderFrameToOutputSurface(inputTexture, presentationTimeUs, releaseTimeNs);
renderFrameToOutputSurface(inputTexture, presentationTimeUs, renderTimeNs);
}
if (textureOutputListener != null) {
renderFrameToOutputTexture(inputTexture, presentationTimeUs);
@ -329,7 +329,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private synchronized void renderFrameToOutputSurface(
GlTextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs)
GlTextureInfo inputTexture, long presentationTimeUs, long renderTimeNs)
throws VideoFrameProcessingException, GlUtil.GlException {
EGLSurface outputEglSurface = checkNotNull(this.outputEglSurface);
SurfaceInfo outputSurfaceInfo = checkNotNull(this.outputSurfaceInfo);
@ -347,9 +347,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
EGLExt.eglPresentationTimeANDROID(
eglDisplay,
outputEglSurface,
releaseTimeNs == VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY
renderTimeNs == VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY
? System.nanoTime()
: releaseTimeNs);
: renderTimeNs);
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
}
@ -427,8 +427,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
eglDisplay,
outputSurfaceInfo.surface,
outputColorInfo.colorTransfer,
// Frames are only released automatically when outputting to an encoder.
/* isEncoderInputSurface= */ releaseFramesAutomatically);
// Frames are only rendered automatically when outputting to an encoder.
/* isEncoderInputSurface= */ renderFramesAutomatically);
}
@Nullable


@ -2027,7 +2027,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
inputAndOutputColorInfos.first,
inputAndOutputColorInfos.second,
INPUT_TYPE_SURFACE,
/* releaseFramesAutomatically= */ false,
/* renderFramesAutomatically= */ false,
/* executor= */ handler::post,
new VideoFrameProcessor.Listener() {
@Override
@ -2048,7 +2048,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
@Override
public void onOutputFrameAvailable(long presentationTimeUs) {
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
if (registeredLastFrame) {
checkState(lastCodecBufferPresentationTimestampUs != C.TIME_UNSET);
}
@ -2254,7 +2254,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
boolean shouldReleaseFrameImmediately = renderer.shouldForceRender(positionUs, earlyUs);
if (shouldReleaseFrameImmediately) {
releaseProcessedFrameInternal(
VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY, isLastFrame);
VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY, isLastFrame);
break;
} else if (!isStarted || positionUs == renderer.initialPositionUs) {
return;
@ -2313,8 +2313,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
private void releaseProcessedFrameInternal(long releaseTimeNs, boolean isLastFrame) {
// VideoFrameProcessor renders to its output surface using
// VideoFrameProcessor.renderOutputFrame, to release the MediaCodecVideoRenderer frame.
checkStateNotNull(videoFrameProcessor);
videoFrameProcessor.releaseOutputFrame(releaseTimeNs);
videoFrameProcessor.renderOutputFrame(releaseTimeNs);
processedFramesTimestampsUs.remove();
renderer.lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000;
if (releaseTimeNs != VideoFrameProcessor.DROP_OUTPUT_FRAME) {


@ -68,7 +68,7 @@ public final class VideoFrameProcessorTestRunner {
private @MonotonicNonNull ColorInfo inputColorInfo;
private @MonotonicNonNull ColorInfo outputColorInfo;
private @VideoFrameProcessor.InputType int inputType;
private OnOutputFrameAvailableListener onOutputFrameAvailableListener;
private OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableListener;
/** Creates a new instance with default values. */
public Builder() {
@ -202,13 +202,14 @@ public final class VideoFrameProcessorTestRunner {
}
/**
* Sets the method to be called in {@link VideoFrameProcessor.Listener#onOutputFrameAvailable}.
* Sets the method to be called in {@link
* VideoFrameProcessor.Listener#onOutputFrameAvailableForRendering}.
*
* <p>The default value is a no-op.
*/
@CanIgnoreReturnValue
public Builder setOnOutputFrameAvailableListener(
OnOutputFrameAvailableListener onOutputFrameAvailableListener) {
public Builder setOnOutputFrameAvailableForRenderingListener(
OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableListener) {
this.onOutputFrameAvailableListener = onOutputFrameAvailableListener;
return this;
}
@ -260,7 +261,7 @@ public final class VideoFrameProcessorTestRunner {
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
@VideoFrameProcessor.InputType int inputType,
OnOutputFrameAvailableListener onOutputFrameAvailableListener)
OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableForRenderingListener)
throws VideoFrameProcessingException {
this.testId = testId;
this.bitmapReader = bitmapReader;
@ -277,7 +278,7 @@ public final class VideoFrameProcessorTestRunner {
inputColorInfo,
outputColorInfo,
inputType,
/* releaseFramesAutomatically= */ true,
/* renderFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
@Override
@ -296,9 +297,10 @@ public final class VideoFrameProcessorTestRunner {
}
@Override
public void onOutputFrameAvailable(long presentationTimeUs) {
// Do nothing as frames are released automatically.
onOutputFrameAvailableListener.onFrameAvailable(presentationTimeUs);
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
// Do nothing as frames are rendered automatically.
onOutputFrameAvailableForRenderingListener.onFrameAvailableForRendering(
presentationTimeUs);
}
@Override
@ -378,8 +380,8 @@ public final class VideoFrameProcessorTestRunner {
}
}
public interface OnOutputFrameAvailableListener {
void onFrameAvailable(long presentationTimeUs);
public interface OnOutputFrameAvailableForRenderingListener {
void onFrameAvailableForRendering(long presentationTimeUs);
}
/** Reads a {@link Bitmap} from {@link VideoFrameProcessor} output. */


@ -159,7 +159,7 @@ import org.checkerframework.dataflow.qual.Pure;
videoFrameProcessorInputColor,
videoFrameProcessorOutputColor,
inputType,
/* releaseFramesAutomatically= */ true,
/* renderFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new VideoFrameProcessor.Listener() {
private long lastProcessedFramePresentationTimeUs;
@ -175,8 +175,8 @@ import org.checkerframework.dataflow.qual.Pure;
}
@Override
public void onOutputFrameAvailable(long presentationTimeUs) {
// Frames are released automatically.
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
// Frames are rendered automatically.
if (presentationTimeUs == 0) {
encoderExpectsTimestampZero = true;
}