Mirror of https://github.com/androidx/media.git (synced 2025-04-30 06:46:50 +08:00)
effect: Rename VFP frame release to render.
renderOutputFrame actually renders frames to an output surface. We'll soon have a releaseOutputFrame method that releases the resources associated with an output time, so rename this method to disambiguate the two. Also rename onOutputFrameAvailable to onOutputFrameAvailableForRendering, to make it clear that the frame is available for rendering, not for "release".

This should be a renaming-only change with no functional differences.

PiperOrigin-RevId: 527844947
Parent: 3a3322dbc5
Commit: 24343f55af
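For orientation, a minimal sketch (not part of this commit) of how a caller drives the renamed API when renderFramesAutomatically is false: each onOutputFrameAvailableForRendering callback is answered with exactly one renderOutputFrame call, passing either a render time in nanoseconds or one of the two sentinel constants. The videoFrameProcessor field and the 10 ms render-ahead policy below are placeholder assumptions, and the remaining Listener overrides are omitted.

// Sketch only: assumes a VideoFrameProcessor created with renderFramesAutomatically=false,
// stored in a videoFrameProcessor field; the remaining Listener overrides are omitted.
VideoFrameProcessor.Listener listener =
    new VideoFrameProcessor.Listener() {
      @Override
      public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
        // Pick when the frame should reach the output surface, in nanoseconds.
        long renderTimeNs = System.nanoTime() + 10_000_000; // ~10 ms ahead (assumed policy).
        videoFrameProcessor.renderOutputFrame(renderTimeNs);
        // Alternatively:
        //   videoFrameProcessor.renderOutputFrame(VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY);
        //   videoFrameProcessor.renderOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
      }
    };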
@@ -79,10 +79,10 @@ public interface VideoFrameProcessor {
 * @param inputColorInfo The {@link ColorInfo} for input frames.
 * @param outputColorInfo The {@link ColorInfo} for output frames.
 * @param inputType The {@link InputType}.
-* @param releaseFramesAutomatically If {@code true}, the instance will render output frames to
+* @param renderFramesAutomatically If {@code true}, the instance will render output frames to
 * the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as
 * {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link
-* VideoFrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to
+* VideoFrameProcessor} will block until {@link #renderOutputFrame(long)} is called, to
 * render or drop the frame.
 * @param executor The {@link Executor} on which the {@code listener} is invoked.
 * @param listener A {@link Listener}.
@@ -97,7 +97,7 @@ public interface VideoFrameProcessor {
 ColorInfo inputColorInfo,
 ColorInfo outputColorInfo,
 @InputType int inputType,
-boolean releaseFramesAutomatically,
+boolean renderFramesAutomatically,
 Executor executor,
 Listener listener)
 throws VideoFrameProcessingException;
@@ -128,7 +128,7 @@ public interface VideoFrameProcessor {
 *
 * @param presentationTimeUs The presentation time of the frame, in microseconds.
 */
-void onOutputFrameAvailable(long presentationTimeUs);
+void onOutputFrameAvailableForRendering(long presentationTimeUs);

 /**
 * Called when an exception occurs during asynchronous video frame processing.
@@ -143,12 +143,12 @@ public interface VideoFrameProcessor {
 }

 /**
-* Indicates the frame should be released immediately after {@link #releaseOutputFrame(long)} is
+* Indicates the frame should be rendered immediately after {@link #renderOutputFrame(long)} is
 * invoked.
 */
-long RELEASE_OUTPUT_FRAME_IMMEDIATELY = -1;
+long RENDER_OUTPUT_FRAME_IMMEDIATELY = -1;

-/** Indicates the frame should be dropped after {@link #releaseOutputFrame(long)} is invoked. */
+/** Indicates the frame should be dropped after {@link #renderOutputFrame(long)} is invoked. */
 long DROP_OUTPUT_FRAME = -2;

 /**
@@ -226,7 +226,7 @@ public interface VideoFrameProcessor {
 int getPendingInputFrameCount();

 /**
-* Sets the output surface and supporting information. When output frames are released and not
+* Sets the output surface and supporting information. When output frames are rendered and not
 * dropped, they will be rendered to this output {@link SurfaceInfo}.
 *
 * <p>The new output {@link SurfaceInfo} is applied from the next output frame rendered onwards.
@@ -244,24 +244,25 @@ public interface VideoFrameProcessor {
 void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo);

 /**
-* Releases the oldest unreleased output frame that has become {@linkplain
-* Listener#onOutputFrameAvailable(long) available} at the given {@code releaseTimeNs}.
+* Renders the oldest unrendered output frame that has become {@linkplain
+* Listener#onOutputFrameAvailableForRendering(long) available for rendering} at the given {@code
+* renderTimeNs}.
 *
 * <p>This will either render the output frame to the {@linkplain #setOutputSurfaceInfo output
-* surface}, or drop the frame, per {@code releaseTimeNs}.
+* surface}, or drop the frame, per {@code renderTimeNs}.
 *
-* <p>This method must only be called if {@code releaseFramesAutomatically} was set to {@code
+* <p>This method must only be called if {@code renderFramesAutomatically} was set to {@code
 * false} using the {@link Factory} and should be called exactly once for each frame that becomes
-* {@linkplain Listener#onOutputFrameAvailable(long) available}.
+* {@linkplain Listener#onOutputFrameAvailableForRendering(long) available for rendering}.
 *
-* <p>The {@code releaseTimeNs} may be passed to {@link EGLExt#eglPresentationTimeANDROID}
+* <p>The {@code renderTimeNs} may be passed to {@link EGLExt#eglPresentationTimeANDROID}
 * depending on the implementation.
 *
-* @param releaseTimeNs The release time to use for the frame, in nanoseconds. The release time
-* can be before of after the current system time. Use {@link #DROP_OUTPUT_FRAME} to drop the
-* frame, or {@link #RELEASE_OUTPUT_FRAME_IMMEDIATELY} to release the frame immediately.
+* @param renderTimeNs The render time to use for the frame, in nanoseconds. The render time can
+* be before or after the current system time. Use {@link #DROP_OUTPUT_FRAME} to drop the
+* frame, or {@link #RENDER_OUTPUT_FRAME_IMMEDIATELY} to render the frame immediately.
 */
-void releaseOutputFrame(long releaseTimeNs);
+void renderOutputFrame(long renderTimeNs);

 /**
 * Informs the {@code VideoFrameProcessor} that no further input frames should be accepted.

@@ -114,7 +114,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
 Queue<Long> actualPresentationTimesUs = new ConcurrentLinkedQueue<>();
 videoFrameProcessorTestRunner =
 getDefaultFrameProcessorTestRunnerBuilder(testId)
-.setOnOutputFrameAvailableListener(actualPresentationTimesUs::add)
+.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
 .build();

 long offsetUs = 1_000_000L;
@@ -137,7 +137,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
 Queue<Long> actualPresentationTimesUs = new ConcurrentLinkedQueue<>();
 videoFrameProcessorTestRunner =
 getDefaultFrameProcessorTestRunnerBuilder(testId)
-.setOnOutputFrameAvailableListener(actualPresentationTimesUs::add)
+.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
 .build();

 long offsetUs1 = 1_000_000L;
@@ -172,7 +172,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
 Queue<Long> actualPresentationTimesUs = new ConcurrentLinkedQueue<>();
 videoFrameProcessorTestRunner =
 getDefaultFrameProcessorTestRunnerBuilder(testId)
-.setOnOutputFrameAvailableListener(actualPresentationTimesUs::add)
+.setOnOutputFrameAvailableForRenderingListener(actualPresentationTimesUs::add)
 .build();

 videoFrameProcessorTestRunner.queueInputBitmap(
@@ -197,7 +197,7 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
 .setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory.Builder().build())
 .setInputType(INPUT_TYPE_BITMAP)
 .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
-.setOnOutputFrameAvailableListener(
+.setOnOutputFrameAvailableForRenderingListener(
 unused -> checkNotNull(framesProduced).incrementAndGet());
 }
 }

@@ -54,23 +54,23 @@ import org.junit.After;
 import org.junit.Test;
 import org.junit.runner.RunWith;

-/** Tests for frame release in {@link DefaultVideoFrameProcessor}. */
+/** Tests for frame rendering in {@link DefaultVideoFrameProcessor}. */
 @RunWith(AndroidJUnit4.class)
-public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
+public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {

 private static final int WIDTH = 200;
 private static final int HEIGHT = 100;
 /**
-* Time to wait between releasing frames to avoid frame drops between GL and the {@link
+* Time to wait between rendering frames to avoid frame drops between GL and the {@link
 * ImageReader}.
 */
-private static final long PER_FRAME_RELEASE_WAIT_TIME_MS = 1000L;
-/** Maximum time to wait for each released frame to be notified. */
+private static final long PER_FRAME_RENDERING_WAIT_TIME_MS = 1000L;
+/** Maximum time to wait for each rendered frame to be notified. */
 private static final long PER_FRAME_TIMEOUT_MS = 5000L;

 private static final long MICROS_TO_NANOS = 1000L;

-private final LinkedBlockingQueue<Long> outputReleaseTimesNs = new LinkedBlockingQueue<>();
+private final LinkedBlockingQueue<Long> outputRenderTimesNs = new LinkedBlockingQueue<>();

 private @MonotonicNonNull DefaultVideoFrameProcessor defaultVideoFrameProcessor;

@@ -82,22 +82,22 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 }

 @Test
-public void automaticFrameRelease_withOneFrame_reusesInputTimestamp() throws Exception {
+public void automaticFrameRendering_withOneFrame_reusesInputTimestamp() throws Exception {
 long originalPresentationTimeUs = 1234;
 AtomicLong actualPresentationTimeUs = new AtomicLong();
 processFramesToEndOfStream(
 /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
 /* onFrameAvailableListener= */ actualPresentationTimeUs::set,
-/* releaseFramesAutomatically= */ true);
+/* renderFramesAutomatically= */ true);

 assertThat(actualPresentationTimeUs.get()).isEqualTo(originalPresentationTimeUs);
-ImmutableList<Long> actualReleaseTimesNs =
-waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
-assertThat(actualReleaseTimesNs).containsExactly(MICROS_TO_NANOS * originalPresentationTimeUs);
+ImmutableList<Long> actualRenderTimesNs =
+waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 1);
+assertThat(actualRenderTimesNs).containsExactly(MICROS_TO_NANOS * originalPresentationTimeUs);
 }

 @Test
-public void automaticFrameRelease_withThreeFrames_reusesInputTimestamps() throws Exception {
+public void automaticFrameRendering_withThreeFrames_reusesInputTimestamps() throws Exception {
 long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
 ArrayList<Long> actualPresentationTimesUs = new ArrayList<>();
 processFramesToEndOfStream(
@@ -108,12 +108,12 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 // TODO(b/264252759): Investigate output frames being dropped and remove sleep.
 // Frames can be dropped silently between EGL and the ImageReader. Sleep after each call
 // to swap buffers, to avoid this behavior.
-Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
+Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
 } catch (InterruptedException e) {
 throw new IllegalStateException(e);
 }
 },
-/* releaseFramesAutomatically= */ true);
+/* renderFramesAutomatically= */ true);

 assertThat(actualPresentationTimesUs)
 .containsExactly(
@@ -121,9 +121,9 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 originalPresentationTimesUs[1],
 originalPresentationTimesUs[2])
 .inOrder();
-ImmutableList<Long> actualReleaseTimesNs =
-waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 3);
-assertThat(actualReleaseTimesNs)
+ImmutableList<Long> actualRenderTimesNs =
+waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 3);
+assertThat(actualRenderTimesNs)
 .containsExactly(
 MICROS_TO_NANOS * originalPresentationTimesUs[0],
 MICROS_TO_NANOS * originalPresentationTimesUs[1],
@@ -132,67 +132,66 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 }

 @Test
-public void controlledFrameRelease_withOneFrame_usesGivenTimestamp() throws Exception {
+public void controlledFrameRendering_withOneFrame_usesGivenTimestamp() throws Exception {
 long originalPresentationTimeUs = 1234;
-long releaseTimesNs = System.nanoTime() + 345678;
+long renderTimesNs = System.nanoTime() + 345678;
 AtomicLong actualPresentationTimeUs = new AtomicLong();
 processFramesToEndOfStream(
 /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
 /* onFrameAvailableListener= */ presentationTimeUs -> {
 actualPresentationTimeUs.set(presentationTimeUs);
-checkNotNull(defaultVideoFrameProcessor).releaseOutputFrame(releaseTimesNs);
+checkNotNull(defaultVideoFrameProcessor).renderOutputFrame(renderTimesNs);
 },
-/* releaseFramesAutomatically= */ false);
+/* renderFramesAutomatically= */ false);

-ImmutableList<Long> actualReleaseTimesNs =
-waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
-assertThat(actualReleaseTimesNs).containsExactly(releaseTimesNs);
+ImmutableList<Long> actualRenderTimesNs =
+waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 1);
+assertThat(actualRenderTimesNs).containsExactly(renderTimesNs);
 }

 @Test
-public void controlledFrameRelease_withOneFrameRequestImmediateRelease_releasesFrame()
+public void controlledFrameRendering_withOneFrameRequestImmediateRender_rendersframe()
 throws Exception {
 long originalPresentationTimeUs = 1234;
-long releaseTimesNs = VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY;
+long renderTimesNs = VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY;
 AtomicLong actualPresentationTimeUs = new AtomicLong();
 processFramesToEndOfStream(
 /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
 /* onFrameAvailableListener= */ presentationTimeUs -> {
 actualPresentationTimeUs.set(presentationTimeUs);
-checkNotNull(defaultVideoFrameProcessor).releaseOutputFrame(releaseTimesNs);
+checkNotNull(defaultVideoFrameProcessor).renderOutputFrame(renderTimesNs);
 },
-/* releaseFramesAutomatically= */ false);
+/* renderFramesAutomatically= */ false);

 assertThat(actualPresentationTimeUs.get()).isEqualTo(originalPresentationTimeUs);
-// The actual release time is determined by the VideoFrameProcessor when releasing the frame.
-ImmutableList<Long> actualReleaseTimesNs =
-waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
-assertThat(actualReleaseTimesNs).hasSize(1);
+// The actual render time is determined by the VideoFrameProcessor when rendering the frame.
+ImmutableList<Long> actualRenderTimesNs =
+waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 1);
+assertThat(actualRenderTimesNs).hasSize(1);
 }

 @Test
-public void controlledFrameRelease_withLateFrame_releasesFrame() throws Exception {
+public void controlledFrameRendering_withLateFrame_rendersframe() throws Exception {
 long originalPresentationTimeUs = 1234;
-long releaseTimeBeforeCurrentTimeNs = System.nanoTime() - 345678;
+long renderTimeBeforeCurrentTimeNs = System.nanoTime() - 345678;
 AtomicLong actualPresentationTimeUs = new AtomicLong();
 processFramesToEndOfStream(
 /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
 /* onFrameAvailableListener= */ presentationTimeUs -> {
 actualPresentationTimeUs.set(presentationTimeUs);
-checkNotNull(defaultVideoFrameProcessor)
-.releaseOutputFrame(releaseTimeBeforeCurrentTimeNs);
+checkNotNull(defaultVideoFrameProcessor).renderOutputFrame(renderTimeBeforeCurrentTimeNs);
 },
-/* releaseFramesAutomatically= */ false);
+/* renderFramesAutomatically= */ false);

-ImmutableList<Long> actualReleaseTimesNs =
-waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
-assertThat(actualReleaseTimesNs).hasSize(1);
-// The actual release time is determined by the VideoFrameProcessor when releasing the frame.
-assertThat(actualReleaseTimesNs.get(0)).isAtLeast(releaseTimeBeforeCurrentTimeNs);
+ImmutableList<Long> actualRenderTimesNs =
+waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 1);
+assertThat(actualRenderTimesNs).hasSize(1);
+// The actual render time is determined by the VideoFrameProcessor when rendering the frame.
+assertThat(actualRenderTimesNs.get(0)).isAtLeast(renderTimeBeforeCurrentTimeNs);
 }

 @Test
-public void controlledFrameRelease_requestsFrameDropping_dropsFrame() throws Exception {
+public void controlledFrameRendering_requestsFrameDropping_dropsFrame() throws Exception {
 long originalPresentationTimeUs = 1234;
 AtomicLong actualPresentationTimeUs = new AtomicLong();
 processFramesToEndOfStream(
@@ -200,19 +199,19 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 /* onFrameAvailableListener= */ presentationTimeNs -> {
 actualPresentationTimeUs.set(presentationTimeNs);
 checkNotNull(defaultVideoFrameProcessor)
-.releaseOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
+.renderOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
 },
-/* releaseFramesAutomatically= */ false);
+/* renderFramesAutomatically= */ false);

-waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 0);
+waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 0);
 }

 @Test
-public void controlledFrameRelease_withThreeIndividualFrames_usesGivenTimestamps()
+public void controlledFrameRendering_withThreeIndividualFrames_usesGivenTimestamps()
 throws Exception {
 long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
 long offsetNs = System.nanoTime();
-long[] releaseTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
+long[] renderTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
 ArrayList<Long> actualPresentationTimesUs = new ArrayList<>();
 AtomicInteger frameIndex = new AtomicInteger();
 processFramesToEndOfStream(
@@ -220,17 +219,17 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 /* onFrameAvailableListener= */ presentationTimeUs -> {
 actualPresentationTimesUs.add(presentationTimeUs);
 checkNotNull(defaultVideoFrameProcessor)
-.releaseOutputFrame(releaseTimesNs[frameIndex.getAndIncrement()]);
+.renderOutputFrame(renderTimesNs[frameIndex.getAndIncrement()]);
 try {
 // TODO(b/264252759): Investigate output frames being dropped and remove sleep.
 // Frames can be dropped silently between EGL and the ImageReader. Sleep after each call
 // to swap buffers, to avoid this behavior.
-Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
+Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
 } catch (InterruptedException e) {
 throw new IllegalStateException(e);
 }
 },
-/* releaseFramesAutomatically= */ false);
+/* renderFramesAutomatically= */ false);

 assertThat(actualPresentationTimesUs)
 .containsExactly(
@@ -240,31 +239,32 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 .inOrder();
 int actualFrameCount = frameIndex.get();
 assertThat(actualFrameCount).isEqualTo(originalPresentationTimesUs.length);
-long[] actualReleaseTimesNs =
-Longs.toArray(waitForFrameReleaseAndGetReleaseTimesNs(actualFrameCount));
-assertThat(actualReleaseTimesNs).isEqualTo(releaseTimesNs);
+long[] actualRenderTimesNs =
+Longs.toArray(waitForFrameRenderingAndGetRenderTimesNs(actualFrameCount));
+assertThat(actualRenderTimesNs).isEqualTo(renderTimesNs);
 }

 @Test
-public void controlledFrameRelease_withThreeFramesAtOnce_usesGivenTimestamps() throws Exception {
+public void controlledFrameRendering_withThreeFramesAtOnce_usesGivenTimestamps()
+throws Exception {
 long[] originalPresentationTimesUs = new long[] {1234, 3456, 4567};
 long offsetNs = System.nanoTime();
-long[] releaseTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
+long[] renderTimesNs = new long[] {offsetNs + 123456, offsetNs + 234567, offsetNs + 345678};
 ArrayList<Long> actualPresentationTimesUs = new ArrayList<>();
 processFramesToEndOfStream(
 /* inputPresentationTimesUs= */ originalPresentationTimesUs,
 /* onFrameAvailableListener= */ actualPresentationTimesUs::add,
-/* releaseFramesAutomatically= */ false);
+/* renderFramesAutomatically= */ false);

 // TODO(b/264252759): Investigate output frames being dropped and remove sleep.
 // Frames can be dropped silently between EGL and the ImageReader. Sleep after each call
 // to swap buffers, to avoid this behavior.
-defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[0]);
-Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
-defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[1]);
-Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
-defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[2]);
-Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
+defaultVideoFrameProcessor.renderOutputFrame(renderTimesNs[0]);
+Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
+defaultVideoFrameProcessor.renderOutputFrame(renderTimesNs[1]);
+Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);
+defaultVideoFrameProcessor.renderOutputFrame(renderTimesNs[2]);
+Thread.sleep(PER_FRAME_RENDERING_WAIT_TIME_MS);

 assertThat(actualPresentationTimesUs)
 .containsExactly(
@@ -272,20 +272,20 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 originalPresentationTimesUs[1],
 originalPresentationTimesUs[2])
 .inOrder();
-long[] actualReleaseTimesNs =
-Longs.toArray(waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 3));
-assertThat(actualReleaseTimesNs).isEqualTo(releaseTimesNs);
+long[] actualRenderTimesNs =
+Longs.toArray(waitForFrameRenderingAndGetRenderTimesNs(/* expectedFrameCount= */ 3));
+assertThat(actualRenderTimesNs).isEqualTo(renderTimesNs);
 }

-private interface OnOutputFrameAvailableListener {
-void onFrameAvailable(long presentationTimeUs);
+private interface OnOutputFrameAvailableForRenderingListener {
+void onFrameAvailableForRendering(long presentationTimeUs);
 }

 @EnsuresNonNull("defaultVideoFrameProcessor")
 private void processFramesToEndOfStream(
 long[] inputPresentationTimesUs,
-OnOutputFrameAvailableListener onFrameAvailableListener,
-boolean releaseFramesAutomatically)
+OnOutputFrameAvailableForRenderingListener onFrameAvailableListener,
+boolean renderFramesAutomatically)
 throws Exception {
 AtomicReference<@NullableType VideoFrameProcessingException>
 videoFrameProcessingExceptionReference = new AtomicReference<>();
@@ -302,7 +302,7 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 /* inputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
 /* outputColorInfo= */ ColorInfo.SDR_BT709_LIMITED,
 INPUT_TYPE_SURFACE,
-releaseFramesAutomatically,
+renderFramesAutomatically,
 MoreExecutors.directExecutor(),
 new VideoFrameProcessor.Listener() {
 @Override
@@ -319,15 +319,15 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 outputImageReader.setOnImageAvailableListener(
 imageReader -> {
 try (Image image = imageReader.acquireNextImage()) {
-outputReleaseTimesNs.add(image.getTimestamp());
+outputRenderTimesNs.add(image.getTimestamp());
 }
 },
 Util.createHandlerForCurrentOrMainLooper());
 }

 @Override
-public void onOutputFrameAvailable(long presentationTimeUs) {
-onFrameAvailableListener.onFrameAvailable(presentationTimeUs);
+public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+onFrameAvailableListener.onFrameAvailableForRendering(presentationTimeUs);
 }

 @Override
@@ -364,15 +364,15 @@ public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
 }
 }

-private ImmutableList<Long> waitForFrameReleaseAndGetReleaseTimesNs(int expectedFrameCount)
+private ImmutableList<Long> waitForFrameRenderingAndGetRenderTimesNs(int expectedFrameCount)
 throws Exception {
 ImmutableList.Builder<Long> listBuilder = new ImmutableList.Builder<>();
 for (int i = 0; i < expectedFrameCount; i++) {
-listBuilder.add(checkNotNull(outputReleaseTimesNs.poll(PER_FRAME_TIMEOUT_MS, MILLISECONDS)));
+listBuilder.add(checkNotNull(outputRenderTimesNs.poll(PER_FRAME_TIMEOUT_MS, MILLISECONDS)));
 }
 // This is a best-effort check because there's no guarantee that frames aren't added to the
-// release times after this method has been called.
-assertThat(outputReleaseTimesNs).isEmpty();
+// render times after this method has been called.
+assertThat(outputRenderTimesNs).isEmpty();
 return listBuilder.build();
 }

@@ -188,7 +188,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 ColorInfo inputColorInfo,
 ColorInfo outputColorInfo,
 @InputType int inputType,
-boolean releaseFramesAutomatically,
+boolean renderFramesAutomatically,
 Executor listenerExecutor,
 Listener listener)
 throws VideoFrameProcessingException {
@@ -227,7 +227,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 outputColorInfo,
 enableColorTransfers,
 inputType,
-releaseFramesAutomatically,
+renderFramesAutomatically,
 singleThreadExecutorService,
 listenerExecutor,
 listener,
@@ -253,7 +253,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 private final EGLContext eglContext;
 private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
 private final InputHandler inputHandler;
-private final boolean releaseFramesAutomatically;
+private final boolean renderFramesAutomatically;
 private final FinalShaderProgramWrapper finalShaderProgramWrapper;
 private final ImmutableList<GlShaderProgram> allShaderPrograms;
 // A queue of input streams that have not been fully processed identified by their input types.
@@ -271,13 +271,13 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 @InputType int inputType,
 VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
 ImmutableList<GlShaderProgram> shaderPrograms,
-boolean releaseFramesAutomatically)
+boolean renderFramesAutomatically)
 throws VideoFrameProcessingException {

 this.eglDisplay = eglDisplay;
 this.eglContext = eglContext;
 this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
-this.releaseFramesAutomatically = releaseFramesAutomatically;
+this.renderFramesAutomatically = renderFramesAutomatically;
 this.unprocessedInputStreams = new ConcurrentLinkedQueue<>();

 checkState(!shaderPrograms.isEmpty());
@@ -411,12 +411,12 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 }

 @Override
-public void releaseOutputFrame(long releaseTimeNs) {
+public void renderOutputFrame(long renderTimeNs) {
 checkState(
-!releaseFramesAutomatically,
-"Calling this method is not allowed when releaseFramesAutomatically is enabled");
+!renderFramesAutomatically,
+"Calling this method is not allowed when renderFramesAutomatically is enabled");
 videoFrameProcessingTaskExecutor.submitWithHighPriority(
-() -> finalShaderProgramWrapper.releaseOutputFrame(releaseTimeNs));
+() -> finalShaderProgramWrapper.renderOutputFrame(renderTimeNs));
 }

 @Override
@@ -494,7 +494,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 ColorInfo outputColorInfo,
 boolean enableColorTransfers,
 @InputType int inputType,
-boolean releaseFramesAutomatically,
+boolean renderFramesAutomatically,
 ExecutorService singleThreadExecutorService,
 Executor executor,
 Listener listener,
@@ -514,9 +514,9 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 glObjectsProvider.createEglContext(eglDisplay, openGlVersion, configAttributes);
 glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay, configAttributes);

-// Not releaseFramesAutomatically means outputting to a display surface. HDR display surfaces
+// Not renderFramesAutomatically means outputting to a display surface. HDR display surfaces
 // require the BT2020 PQ GL extension.
-if (!releaseFramesAutomatically && ColorInfo.isTransferHdr(outputColorInfo)) {
+if (!renderFramesAutomatically && ColorInfo.isTransferHdr(outputColorInfo)) {
 // Display hardware supports PQ only.
 checkArgument(outputColorInfo.colorTransfer == C.COLOR_TRANSFER_ST2084);
 if (Util.SDK_INT < 33 || !GlUtil.isBt2020PqExtensionSupported()) {
@@ -538,7 +538,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 outputColorInfo,
 enableColorTransfers,
 inputType,
-releaseFramesAutomatically,
+renderFramesAutomatically,
 executor,
 listener,
 glObjectsProvider,
@@ -555,7 +555,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 inputType,
 videoFrameProcessingTaskExecutor,
 shaderPrograms,
-releaseFramesAutomatically);
+renderFramesAutomatically);
 }

 /**
@@ -579,7 +579,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 ColorInfo outputColorInfo,
 boolean enableColorTransfers,
 @InputType int inputType,
-boolean releaseFramesAutomatically,
+boolean renderFramesAutomatically,
 Executor executor,
 Listener listener,
 GlObjectsProvider glObjectsProvider,
@@ -666,7 +666,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
 enableColorTransfers,
 sampleFromInputTexture,
 inputType,
-releaseFramesAutomatically,
+renderFramesAutomatically,
 executor,
 listener,
 glObjectsProvider,

@@ -87,7 +87,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 private final ColorInfo inputColorInfo;
 private final ColorInfo outputColorInfo;
 private final boolean enableColorTransfers;
-private final boolean releaseFramesAutomatically;
+private final boolean renderFramesAutomatically;
 private final Executor videoFrameProcessorListenerExecutor;
 private final VideoFrameProcessor.Listener videoFrameProcessorListener;
 private final float[] textureTransformMatrix;
@@ -129,7 +129,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 boolean enableColorTransfers,
 boolean sampleFromInputTexture,
 @VideoFrameProcessor.InputType int inputType,
-boolean releaseFramesAutomatically,
+boolean renderFramesAutomatically,
 Executor videoFrameProcessorListenerExecutor,
 VideoFrameProcessor.Listener videoFrameProcessorListener,
 GlObjectsProvider glObjectsProvider,
@@ -145,7 +145,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 this.inputColorInfo = inputColorInfo;
 this.outputColorInfo = outputColorInfo;
 this.enableColorTransfers = enableColorTransfers;
-this.releaseFramesAutomatically = releaseFramesAutomatically;
+this.renderFramesAutomatically = renderFramesAutomatically;
 this.videoFrameProcessorListenerExecutor = videoFrameProcessorListenerExecutor;
 this.videoFrameProcessorListener = videoFrameProcessorListener;
 this.glObjectsProvider = glObjectsProvider;
@@ -203,9 +203,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 public void queueInputFrame(GlTextureInfo inputTexture, long presentationTimeUs) {
 frameProcessingStarted = true;
 videoFrameProcessorListenerExecutor.execute(
-() -> videoFrameProcessorListener.onOutputFrameAvailable(presentationTimeUs));
-if (releaseFramesAutomatically) {
-renderFrame(inputTexture, presentationTimeUs, /* releaseTimeNs= */ presentationTimeUs * 1000);
+() -> videoFrameProcessorListener.onOutputFrameAvailableForRendering(presentationTimeUs));
+if (renderFramesAutomatically) {
+renderFrame(inputTexture, presentationTimeUs, /* renderTimeNs= */ presentationTimeUs * 1000);
 } else {
 availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
 }
@@ -218,20 +218,20 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 throw new UnsupportedOperationException();
 }

-public void releaseOutputFrame(long releaseTimeNs) {
+public void renderOutputFrame(long renderTimeNs) {
 frameProcessingStarted = true;
-checkState(!releaseFramesAutomatically);
+checkState(!renderFramesAutomatically);
 Pair<GlTextureInfo, Long> oldestAvailableFrame = availableFrames.remove();
 renderFrame(
 /* inputTexture= */ oldestAvailableFrame.first,
 /* presentationTimeUs= */ oldestAvailableFrame.second,
-releaseTimeNs);
+renderTimeNs);
 }

 @Override
 public void flush() {
 frameProcessingStarted = true;
-// Drops all frames that aren't released yet.
+// Drops all frames that aren't rendered yet.
 availableFrames.clear();
 if (defaultShaderProgram != null) {
 defaultShaderProgram.flush();
@@ -302,15 +302,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 }

 private synchronized void renderFrame(
-GlTextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) {
+GlTextureInfo inputTexture, long presentationTimeUs, long renderTimeNs) {
 try {
-if (releaseTimeNs == VideoFrameProcessor.DROP_OUTPUT_FRAME
+if (renderTimeNs == VideoFrameProcessor.DROP_OUTPUT_FRAME
 || !ensureConfigured(inputTexture.width, inputTexture.height)) {
 inputListener.onInputFrameProcessed(inputTexture);
 return; // Drop frames when requested, or there is no output surface.
 }
 if (outputSurfaceInfo != null) {
-renderFrameToOutputSurface(inputTexture, presentationTimeUs, releaseTimeNs);
+renderFrameToOutputSurface(inputTexture, presentationTimeUs, renderTimeNs);
 }
 if (textureOutputListener != null) {
 renderFrameToOutputTexture(inputTexture, presentationTimeUs);
@@ -329,7 +329,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 }

 private synchronized void renderFrameToOutputSurface(
-GlTextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs)
+GlTextureInfo inputTexture, long presentationTimeUs, long renderTimeNs)
 throws VideoFrameProcessingException, GlUtil.GlException {
 EGLSurface outputEglSurface = checkNotNull(this.outputEglSurface);
 SurfaceInfo outputSurfaceInfo = checkNotNull(this.outputSurfaceInfo);
@@ -347,9 +347,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 EGLExt.eglPresentationTimeANDROID(
 eglDisplay,
 outputEglSurface,
-releaseTimeNs == VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY
+renderTimeNs == VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY
 ? System.nanoTime()
-: releaseTimeNs);
+: renderTimeNs);
 EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
 }

@@ -427,8 +427,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 eglDisplay,
 outputSurfaceInfo.surface,
 outputColorInfo.colorTransfer,
-// Frames are only released automatically when outputting to an encoder.
-/* isEncoderInputSurface= */ releaseFramesAutomatically);
+// Frames are only rendered automatically when outputting to an encoder.
+/* isEncoderInputSurface= */ renderFramesAutomatically);
 }

 @Nullable

@@ -2027,7 +2027,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
 inputAndOutputColorInfos.first,
 inputAndOutputColorInfos.second,
 INPUT_TYPE_SURFACE,
-/* releaseFramesAutomatically= */ false,
+/* renderFramesAutomatically= */ false,
 /* executor= */ handler::post,
 new VideoFrameProcessor.Listener() {
 @Override
@@ -2048,7 +2048,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
 }

 @Override
-public void onOutputFrameAvailable(long presentationTimeUs) {
+public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
 if (registeredLastFrame) {
 checkState(lastCodecBufferPresentationTimestampUs != C.TIME_UNSET);
 }
@@ -2254,7 +2254,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
 boolean shouldReleaseFrameImmediately = renderer.shouldForceRender(positionUs, earlyUs);
 if (shouldReleaseFrameImmediately) {
 releaseProcessedFrameInternal(
-VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY, isLastFrame);
+VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY, isLastFrame);
 break;
 } else if (!isStarted || positionUs == renderer.initialPositionUs) {
 return;
@@ -2313,8 +2313,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
 }

 private void releaseProcessedFrameInternal(long releaseTimeNs, boolean isLastFrame) {
+// VideoFrameProcessor renders to its output surface using
+// VideoFrameProcessor.renderOutputFrame, to release the MediaCodecVideoRenderer frame.
 checkStateNotNull(videoFrameProcessor);
-videoFrameProcessor.releaseOutputFrame(releaseTimeNs);
+videoFrameProcessor.renderOutputFrame(releaseTimeNs);
 processedFramesTimestampsUs.remove();
 renderer.lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000;
 if (releaseTimeNs != VideoFrameProcessor.DROP_OUTPUT_FRAME) {

@@ -68,7 +68,7 @@ public final class VideoFrameProcessorTestRunner {
 private @MonotonicNonNull ColorInfo inputColorInfo;
 private @MonotonicNonNull ColorInfo outputColorInfo;
 private @VideoFrameProcessor.InputType int inputType;
-private OnOutputFrameAvailableListener onOutputFrameAvailableListener;
+private OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableListener;

 /** Creates a new instance with default values. */
 public Builder() {
@@ -202,13 +202,14 @@ public final class VideoFrameProcessorTestRunner {
 }

 /**
-* Sets the method to be called in {@link VideoFrameProcessor.Listener#onOutputFrameAvailable}.
+* Sets the method to be called in {@link
+* VideoFrameProcessor.Listener#onOutputFrameAvailableForRendering}.
 *
 * <p>The default value is a no-op.
 */
 @CanIgnoreReturnValue
-public Builder setOnOutputFrameAvailableListener(
-OnOutputFrameAvailableListener onOutputFrameAvailableListener) {
+public Builder setOnOutputFrameAvailableForRenderingListener(
+OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableListener) {
 this.onOutputFrameAvailableListener = onOutputFrameAvailableListener;
 return this;
 }
@@ -260,7 +261,7 @@ public final class VideoFrameProcessorTestRunner {
 ColorInfo inputColorInfo,
 ColorInfo outputColorInfo,
 @VideoFrameProcessor.InputType int inputType,
-OnOutputFrameAvailableListener onOutputFrameAvailableListener)
+OnOutputFrameAvailableForRenderingListener onOutputFrameAvailableForRenderingListener)
 throws VideoFrameProcessingException {
 this.testId = testId;
 this.bitmapReader = bitmapReader;
@@ -277,7 +278,7 @@ public final class VideoFrameProcessorTestRunner {
 inputColorInfo,
 outputColorInfo,
 inputType,
-/* releaseFramesAutomatically= */ true,
+/* renderFramesAutomatically= */ true,
 MoreExecutors.directExecutor(),
 new VideoFrameProcessor.Listener() {
 @Override
@@ -296,9 +297,10 @@ public final class VideoFrameProcessorTestRunner {
 }

 @Override
-public void onOutputFrameAvailable(long presentationTimeUs) {
-// Do nothing as frames are released automatically.
-onOutputFrameAvailableListener.onFrameAvailable(presentationTimeUs);
+public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+// Do nothing as frames are rendered automatically.
+onOutputFrameAvailableForRenderingListener.onFrameAvailableForRendering(
+presentationTimeUs);
 }

 @Override
@@ -378,8 +380,8 @@ public final class VideoFrameProcessorTestRunner {
 }
 }

-public interface OnOutputFrameAvailableListener {
-void onFrameAvailable(long presentationTimeUs);
+public interface OnOutputFrameAvailableForRenderingListener {
+void onFrameAvailableForRendering(long presentationTimeUs);
 }

 /** Reads a {@link Bitmap} from {@link VideoFrameProcessor} output. */

@@ -159,7 +159,7 @@ import org.checkerframework.dataflow.qual.Pure;
 videoFrameProcessorInputColor,
 videoFrameProcessorOutputColor,
 inputType,
-/* releaseFramesAutomatically= */ true,
+/* renderFramesAutomatically= */ true,
 MoreExecutors.directExecutor(),
 new VideoFrameProcessor.Listener() {
 private long lastProcessedFramePresentationTimeUs;
@@ -175,8 +175,8 @@ import org.checkerframework.dataflow.qual.Pure;
 }

 @Override
-public void onOutputFrameAvailable(long presentationTimeUs) {
-// Frames are released automatically.
+public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+// Frames are rendered automatically.
 if (presentationTimeUs == 0) {
 encoderExpectsTimestampZero = true;
 }