Mirror of https://github.com/androidx/media.git
Use VideoFrameMetadataListener for replay notification
PiperOrigin-RevId: 738366305
Parent: d6b9988eb0
Commit: 8e56810b57
@@ -172,8 +172,12 @@ public interface VideoFrameProcessor {
      * rendering.
      *
      * @param presentationTimeUs The presentation time of the frame, in microseconds.
+     * @param isRedrawnFrame Whether the frame is a frame that is {@linkplain #redraw redrawn},
+     *     redrawn frames are rendered directly thus {@link #renderOutputFrame} must not be called
+     *     on such frames.
      */
-    default void onOutputFrameAvailableForRendering(long presentationTimeUs) {}
+    default void onOutputFrameAvailableForRendering(
+        long presentationTimeUs, boolean isRedrawnFrame) {}

     /**
      * Called when an exception occurs during asynchronous video frame processing.
@@ -354,15 +358,16 @@ public interface VideoFrameProcessor {

   /**
    * Renders the oldest unrendered output frame that has become {@linkplain
-   * Listener#onOutputFrameAvailableForRendering(long) available for rendering} at the given {@code
-   * renderTimeNs}.
+   * Listener#onOutputFrameAvailableForRendering(long, boolean) available for rendering} at the
+   * given {@code renderTimeNs}.
    *
    * <p>This will either render the output frame to the {@linkplain #setOutputSurfaceInfo output
    * surface}, or drop the frame, per {@code renderTimeNs}.
    *
    * <p>This method must only be called if {@code renderFramesAutomatically} was set to {@code
    * false} using the {@link Factory} and should be called exactly once for each frame that becomes
-   * {@linkplain Listener#onOutputFrameAvailableForRendering(long) available for rendering}.
+   * {@linkplain Listener#onOutputFrameAvailableForRendering(long, boolean) available for
+   *     rendering}.
    *
    * <p>The {@code renderTimeNs} may be passed to {@link EGLExt#eglPresentationTimeANDROID}
    * depending on the implementation.
@@ -371,8 +376,8 @@ public interface VideoFrameProcessor {
    *     be before or after the current system time. Use {@link #DROP_OUTPUT_FRAME} to drop the
    *     frame or {@link #RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME} to render the frame to the
    *     {@linkplain #setOutputSurfaceInfo output surface} with the presentation timestamp seen in
-   *     {@link Listener#onOutputFrameAvailableForRendering(long)}. If the frame should be rendered
-   *     immediately, pass in {@link SystemClock#nanoTime()}.
+   *     {@link Listener#onOutputFrameAvailableForRendering(long, boolean)}. If the frame should be
+   *     rendered immediately, pass in {@link SystemClock#nanoTime()}.
    */
   void renderOutputFrame(long renderTimeNs);

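Taken together, the two hunks above change the manual-rendering contract: a caller that drives renderOutputFrame itself must now skip frames reported with isRedrawnFrame set, because the processor has already rendered them directly. Below is a minimal sketch of such a listener; it is not part of this commit. It assumes renderFramesAutomatically was set to false via the Factory, that the remaining Listener callbacks have default implementations (as onOutputFrameAvailableForRendering does), and uses System.nanoTime() as an "immediate" render time.

import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;

/** Sketch of a manual-rendering listener that honors the new isRedrawnFrame flag. */
final class ManualRenderingListener implements VideoFrameProcessor.Listener {

  private final VideoFrameProcessor videoFrameProcessor; // Assumed to be configured elsewhere.

  ManualRenderingListener(VideoFrameProcessor videoFrameProcessor) {
    this.videoFrameProcessor = videoFrameProcessor;
  }

  @Override
  public void onOutputFrameAvailableForRendering(
      long presentationTimeUs, boolean isRedrawnFrame) {
    if (isRedrawnFrame) {
      // Redrawn frames are rendered directly by the processor; renderOutputFrame
      // must not be called for them.
      return;
    }
    // Render the frame immediately. A real caller could instead schedule a later
    // renderTimeNs, or pass DROP_OUTPUT_FRAME to drop the frame.
    videoFrameProcessor.renderOutputFrame(/* renderTimeNs= */ System.nanoTime());
  }

  @Override
  public void onError(VideoFrameProcessingException exception) {
    // Surface asynchronous processing errors to the caller.
  }
}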
@@ -93,8 +93,12 @@ public interface VideoGraph {
      * for rendering.
      *
      * @param framePresentationTimeUs The presentation time of the frame, in microseconds.
+     * @param isRedrawnFrame Whether the frame is a frame that is {@linkplain #redraw redrawn},
+     *     redrawn frames are rendered directly thus {@link #renderOutputFrame} must not be called
+     *     on such frames.
      */
-    default void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {}
+    default void onOutputFrameAvailableForRendering(
+        long framePresentationTimeUs, boolean isRedrawnFrame) {}

     /**
      * Called after the {@link VideoGraph} has rendered its final output frame.
@@ -224,8 +228,8 @@ public interface VideoGraph {
    * Renders the output frame from the {@code VideoGraph}.
    *
    * <p>This method must be called only for frames that have become {@linkplain
-   * Listener#onOutputFrameAvailableForRendering(long) available}, calling the method renders the
-   * frame that becomes available the earliest but not yet rendered.
+   * Listener#onOutputFrameAvailableForRendering available}, calling the method renders the frame
+   * that becomes available the earliest but not yet rendered.
    *
    * @see VideoFrameProcessor#renderOutputFrame(long)
    */
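The VideoGraph listener mirrors the VideoFrameProcessor change at the graph level: each non-redrawn frame should be rendered exactly once via the graph's renderOutputFrame (whose signature mirrors VideoFrameProcessor#renderOutputFrame(long) per the @see tag above), and redrawn frames must be left alone. A graph-level sketch under the same assumptions as the previous example, additionally assuming VideoGraph and its Listener live in androidx.media3.common as imported here:

import androidx.media3.common.VideoGraph;

/** Sketch: render each available frame exactly once, skipping redrawn frames. */
final class GraphRenderingListener implements VideoGraph.Listener {

  private final VideoGraph videoGraph; // Assumed to be built and initialized elsewhere.

  GraphRenderingListener(VideoGraph videoGraph) {
    this.videoGraph = videoGraph;
  }

  @Override
  public void onOutputFrameAvailableForRendering(
      long framePresentationTimeUs, boolean isRedrawnFrame) {
    if (isRedrawnFrame) {
      return; // Already rendered directly by the graph.
    }
    // Renders the earliest available frame that has not been rendered yet.
    videoGraph.renderOutputFrame(/* renderTimeNs= */ System.nanoTime());
  }
}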
@@ -225,7 +225,8 @@ public class DefaultVideoFrameProcessorTest {
         }

         @Override
-        public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+        public void onOutputFrameAvailableForRendering(
+            long presentationTimeUs, boolean isRedrawnFrame) {
           outputFrameCount++;
           if (outputFrameCount == 30) {
             firstStreamLastFrameAvailableTimeMs.set(SystemClock.DEFAULT.elapsedRealtime());
@@ -312,7 +313,8 @@ public class DefaultVideoFrameProcessorTest {
         }

         @Override
-        public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+        public void onOutputFrameAvailableForRendering(
+            long presentationTimeUs, boolean isRedrawnFrame) {
           outputFrameAvailableConditionVariable.open();
         }

@@ -294,7 +294,8 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
         }

         @Override
-        public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+        public void onOutputFrameAvailableForRendering(
+            long presentationTimeUs, boolean isRedrawnFrame) {
           onFrameAvailableListener.onFrameAvailableForRendering(presentationTimeUs);
         }

@@ -139,7 +139,8 @@ import java.util.concurrent.atomic.AtomicReference;
         }

         @Override
-        public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+        public void onOutputFrameAvailableForRendering(
+            long presentationTimeUs, boolean isRedrawnFrame) {
           actualPresentationTimesUs.add(presentationTimeUs);
         }

@@ -220,10 +220,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   public void queueInputFrame(
       GlObjectsProvider glObjectsProvider, GlTextureInfo inputTexture, long presentationTimeUs) {
     videoFrameProcessingTaskExecutor.verifyVideoFrameProcessingThread();

     if (!isWaitingForRedrawFrame()) {
       // Don't report output available when redrawing - the redrawn frames are released immediately.
       videoFrameProcessorListenerExecutor.execute(
-          () -> videoFrameProcessorListener.onOutputFrameAvailableForRendering(presentationTimeUs));
+          () ->
+              videoFrameProcessorListener.onOutputFrameAvailableForRendering(
+                  presentationTimeUs, /* isRedrawnFrame= */ false));
     }

     if (textureOutputListener == null) {
@@ -238,6 +241,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     if (isWaitingForRedrawFrame()) {
       if (presentationTimeUs == redrawFramePresentationTimeUs) {
         redrawFramePresentationTimeUs = C.TIME_UNSET;
+        videoFrameProcessorListenerExecutor.execute(
+            () ->
+                videoFrameProcessorListener.onOutputFrameAvailableForRendering(
+                    presentationTimeUs, /* isRedrawnFrame= */ true));
         renderFrame(
             glObjectsProvider,
             inputTexture,
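The two hunks above encode the dispatch rule behind the new flag: a frame queued while no redraw is pending is reported with isRedrawnFrame set to false and waits for renderOutputFrame, while a frame whose timestamp matches a pending redraw is rendered directly and reported with the flag set to true; other frames queued during a redraw are not reported at all. The following standalone sketch restates that rule with hypothetical names (RedrawAwareNotifier, FrameListener, onFrameQueued are illustrative, not the real class in this file):

import java.util.concurrent.Executor;

/** Standalone sketch (hypothetical names) of the redraw-aware notification rule. */
final class RedrawAwareNotifier {

  /** Hypothetical stand-in for the two-argument listener callback added in this commit. */
  interface FrameListener {
    void onOutputFrameAvailableForRendering(long presentationTimeUs, boolean isRedrawnFrame);
  }

  private static final long TIME_UNSET = Long.MIN_VALUE + 1; // Mirrors C.TIME_UNSET.

  private final Executor listenerExecutor;
  private final FrameListener listener;
  private long redrawFramePresentationTimeUs = TIME_UNSET;

  RedrawAwareNotifier(Executor listenerExecutor, FrameListener listener) {
    this.listenerExecutor = listenerExecutor;
    this.listener = listener;
  }

  /** Marks the frame with the given timestamp as the one to redraw. */
  void requestRedraw(long presentationTimeUs) {
    redrawFramePresentationTimeUs = presentationTimeUs;
  }

  /** Applies the notification rule to a newly queued frame. */
  void onFrameQueued(long presentationTimeUs) {
    if (redrawFramePresentationTimeUs == TIME_UNSET) {
      // Regular frame: report it and let the caller decide when to render it.
      listenerExecutor.execute(
          () ->
              listener.onOutputFrameAvailableForRendering(
                  presentationTimeUs, /* isRedrawnFrame= */ false));
      return;
    }
    if (presentationTimeUs == redrawFramePresentationTimeUs) {
      // Redrawn frame: it is rendered directly, so report it with the flag set.
      redrawFramePresentationTimeUs = TIME_UNSET;
      listenerExecutor.execute(
          () ->
              listener.onOutputFrameAvailableForRendering(
                  presentationTimeUs, /* isRedrawnFrame= */ true));
    }
    // Frames queued while a different redraw is pending are not reported.
  }
}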
@@ -229,14 +229,17 @@ public final class MultipleInputVideoGraph implements VideoGraph {
         }

         @Override
-        public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+        public void onOutputFrameAvailableForRendering(
+            long presentationTimeUs, boolean isRedrawnFrame) {
           if (presentationTimeUs == 0) {
             hasProducedFrameWithTimestampZero = true;
           }
           lastRenderedPresentationTimeUs = presentationTimeUs;

           listenerExecutor.execute(
-              () -> listener.onOutputFrameAvailableForRendering(presentationTimeUs));
+              () ->
+                  listener.onOutputFrameAvailableForRendering(
+                      presentationTimeUs, isRedrawnFrame));
         }

         @Override
@@ -172,14 +172,17 @@ public class SingleInputVideoGraph implements VideoGraph {
         }

         @Override
-        public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+        public void onOutputFrameAvailableForRendering(
+            long presentationTimeUs, boolean isRedrawnFrame) {
           // Frames are rendered automatically.
           if (presentationTimeUs == 0) {
             hasProducedFrameWithTimestampZero = true;
           }
           lastProcessedFramePresentationTimeUs = presentationTimeUs;
           listenerExecutor.execute(
-              () -> listener.onOutputFrameAvailableForRendering(presentationTimeUs));
+              () ->
+                  listener.onOutputFrameAvailableForRendering(
+                      presentationTimeUs, isRedrawnFrame));
         }

         @Override
@@ -297,6 +297,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
   private Format videoGraphOutputFormat;
   private @MonotonicNonNull HandlerWrapper handler;
   private @MonotonicNonNull VideoGraph videoGraph;
+  private @MonotonicNonNull VideoFrameMetadataListener videoFrameMetadataListener;
   private long outputStreamStartPositionUs;
   private @VideoSink.FirstFrameReleaseInstruction int nextFirstOutputFrameReleaseInstruction;
   @Nullable private Pair<Surface, Size> currentSurfaceAndSize;
@@ -438,7 +439,8 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
     }

     @Override
-    public void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {
+    public void onOutputFrameAvailableForRendering(
+        long framePresentationTimeUs, boolean isRedrawnFrame) {
       if (pendingFlushCount > 0) {
         // Ignore available frames while flushing
         return;
@@ -447,9 +449,22 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
         // Wake up the player when not playing to render the frame more promptly.
         wakeupListener.onWakeup();
       }

+      long bufferPresentationTimeUs = framePresentationTimeUs - bufferTimestampAdjustmentUs;
+      if (isRedrawnFrame) {
+        // Redrawn frames are rendered directly in the processing pipeline.
+        if (videoFrameMetadataListener != null) {
+          videoFrameMetadataListener.onVideoFrameAboutToBeRendered(
+              /* presentationTimeUs= */ bufferPresentationTimeUs,
+              /* releaseTimeNs= */ C.TIME_UNSET,
+              videoGraphOutputFormat,
+              /* mediaFormat= */ null);
+        }
+        return;
+      }
+
       // The frame presentation time is relative to the start of the Composition and without the
       // renderer offset
-      long bufferPresentationTimeUs = framePresentationTimeUs - bufferTimestampAdjustmentUs;
       lastOutputBufferPresentationTimeUs = bufferPresentationTimeUs;
       Long newOutputStreamStartPositionUs =
           streamStartPositionsUs.pollFloor(bufferPresentationTimeUs);
@@ -614,6 +629,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video

   private void setVideoFrameMetadataListener(
       VideoFrameMetadataListener videoFrameMetadataListener) {
+    this.videoFrameMetadataListener = videoFrameMetadataListener;
     defaultVideoSink.setVideoFrameMetadataListener(videoFrameMetadataListener);
   }

@@ -17,6 +17,7 @@ package androidx.media3.exoplayer.video;

 import android.media.MediaFormat;
 import androidx.annotation.Nullable;
+import androidx.media3.common.C;
 import androidx.media3.common.Format;
 import androidx.media3.common.util.UnstableApi;

@@ -28,7 +29,8 @@ public interface VideoFrameMetadataListener {
    *
    * @param presentationTimeUs The presentation time of the frame, in microseconds.
    * @param releaseTimeNs The system time at which the frame should be displayed, in nanoseconds.
-   *     Can be compared to {@link System#nanoTime()}.
+   *     Can be compared to {@link System#nanoTime()}. It will be {@link C#TIME_UNSET}, if the frame
+   *     is rendered immediately automatically, this is typically the last frame that is rendered.
    * @param format The format associated with the frame.
    * @param mediaFormat The framework media format associated with the frame, or {@code null} if not
    *     known or not applicable (e.g., because the frame was not output by a {@link
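This is the public surface of the change: when a frame is replayed (redrawn) and rendered immediately, the registered VideoFrameMetadataListener is still notified, but with releaseTimeNs set to C.TIME_UNSET instead of a concrete release time. A sketch of how an application might consume this, assuming the listener is attached via ExoPlayer#setVideoFrameMetadataListener and that playback is routed through the video-graph path shown above (for example, when video effects are enabled); the helper method names are illustrative:

import android.media.MediaFormat;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.video.VideoFrameMetadataListener;

/** Sketch: distinguishing replayed (redrawn) frames from regularly scheduled ones. */
final class ReplayAwareMetadataListener implements VideoFrameMetadataListener {

  @Override
  public void onVideoFrameAboutToBeRendered(
      long presentationTimeUs,
      long releaseTimeNs,
      Format format,
      @Nullable MediaFormat mediaFormat) {
    if (releaseTimeNs == C.TIME_UNSET) {
      // Per the updated contract, the frame is rendered immediately and automatically,
      // which is typically a replayed (redrawn) frame.
      onFrameReplayed(presentationTimeUs, format);
    } else {
      // Regular frame with a concrete release time, comparable to System.nanoTime().
      onFrameScheduled(presentationTimeUs, releaseTimeNs);
    }
  }

  private void onFrameReplayed(long presentationTimeUs, Format format) {
    // Application-specific handling, e.g. refreshing overlays tied to the displayed frame.
  }

  private void onFrameScheduled(long presentationTimeUs, long releaseTimeNs) {
    // Application-specific handling.
  }

  /** Attaches the listener; setVideoFrameMetadataListener is part of the ExoPlayer interface. */
  static void attach(ExoPlayer player) {
    player.setVideoFrameMetadataListener(new ReplayAwareMetadataListener());
  }
}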
@@ -306,7 +306,8 @@ public final class VideoFrameProcessorTestRunner {
         }

         @Override
-        public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+        public void onOutputFrameAvailableForRendering(
+            long presentationTimeUs, boolean isRedrawnFrame) {
           // Do nothing as frames are rendered automatically.
           onOutputFrameAvailableForRenderingListener.onFrameAvailableForRendering(
               presentationTimeUs);
@@ -1072,8 +1072,9 @@ public class CompositionPlayerSeekTest {
         }

         @Override
-        public void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {
-          listener.onOutputFrameAvailableForRendering(framePresentationTimeUs);
+        public void onOutputFrameAvailableForRendering(
+            long framePresentationTimeUs, boolean isRedrawnFrame) {
+          listener.onOutputFrameAvailableForRendering(framePresentationTimeUs, isRedrawnFrame);
         }

         @Override
@@ -570,7 +570,8 @@ import org.checkerframework.dataflow.qual.Pure;
         }

         @Override
-        public void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {
+        public void onOutputFrameAvailableForRendering(
+            long framePresentationTimeUs, boolean isRedrawnFrame) {
           if (!renderFramesAutomatically) {
             synchronized (lock) {
               framesAvailableToRender += 1;