Allow stream offset passed to GlEffectsFrameProcessor to change.
This is needed for applying effects to a playlist. The effects are applied based on the presentation time of the frame in its corresponding media item, and the offset is added back before encoding. Each time the offset changes, end of input stream is signalled to the texture processors. This is needed because the texture processors can expect monotonically increasing timestamps within the same input stream, but when the offset changes, the timestamps jump back to 0.

PiperOrigin-RevId: 462714966
parent dd2c16bc45
commit 30e5bc9837
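To illustrate the new call pattern, here is a minimal sketch, not code from this commit: the helper method, its parameters and the example offsets are hypothetical, while FrameInfo, setInputFrameInfo, registerInputFrame and signalEndOfInput are the APIs changed in the diff below.

// Minimal sketch (assumed caller code): drives a FrameProcessor across two playlist items.
static void queuePlaylistItems(FrameProcessor frameProcessor, int width, int height) {
  long[] itemStreamOffsetsUs = {0L, 10_000_000L}; // hypothetical per-item offsets
  for (long streamOffsetUs : itemStreamOffsetsUs) {
    // The offset now travels with each FrameInfo instead of being fixed at creation time.
    frameProcessor.setInputFrameInfo(
        new FrameInfo(width, height, /* pixelWidthHeightRatio= */ 1, streamOffsetUs));
    // For each decoded frame of this item: register it, then render it to the input surface.
    frameProcessor.registerInputFrame();
  }
  // Signal that no frames from any further item will follow.
  frameProcessor.signalEndOfInput();
}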
@@ -133,6 +133,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     acceptedFrame = false;
     AppTextureFrame appTextureFrame =
         new AppTextureFrame(inputTexture.texId, inputTexture.width, inputTexture.height);
+    // TODO(b/238302213): Handle timestamps restarting from 0 when applying effects to a playlist.
+    // MediaPipe will fail if the timestamps are not monotonically increasing.
     appTextureFrame.setTimestamp(presentationTimeUs);
     checkStateNotNull(frameProcessor).onNewFrame(appTextureFrame);
     try {
@@ -167,10 +169,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   }

   @Override
-  public final void signalEndOfInputStream() {
+  public final void signalEndOfCurrentInputStream() {
     frameProcessor.waitUntilIdle();
     if (listener != null) {
-      listener.onOutputStreamEnded();
+      listener.onCurrentOutputStreamEnded();
     }
   }

@@ -379,12 +379,11 @@ public final class GlEffectsFrameProcessorPixelTest {
                    frameProcessingEnded = true;
                  }
                },
-                /* streamOffsetUs= */ 0L,
                effects,
                DebugViewProvider.NONE,
                /* useHdr= */ false));
    glEffectsFrameProcessor.setInputFrameInfo(
-        new FrameInfo(inputWidth, inputHeight, pixelWidthHeightRatio));
+        new FrameInfo(inputWidth, inputHeight, pixelWidthHeightRatio, /* streamOffsetUs= */ 0));
    glEffectsFrameProcessor.registerInputFrame();

    // Queue the first video frame from the extractor.
@@ -435,7 +434,7 @@ public final class GlEffectsFrameProcessorPixelTest {
  }

  private Bitmap processFirstFrameAndEnd() throws InterruptedException {
-    checkNotNull(glEffectsFrameProcessor).signalEndOfInputStream();
+    checkNotNull(glEffectsFrameProcessor).signalEndOfInput();
    Thread.sleep(FRAME_PROCESSING_WAIT_MS);
    assertThat(frameProcessingEnded).isTrue();
    assertThat(frameProcessingException.get()).isNull();
@@ -92,9 +92,9 @@ import java.util.Queue;
   }

   @Override
-  public void onOutputStreamEnded() {
+  public void onCurrentOutputStreamEnded() {
     if (nextGlTextureProcessor != null) {
-      frameProcessingTaskExecutor.submit(nextGlTextureProcessor::signalEndOfInputStream);
+      frameProcessingTaskExecutor.submit(nextGlTextureProcessor::signalEndOfCurrentInputStream);
     }
   }

@@ -37,6 +37,8 @@ import androidx.media3.common.util.GlUtil;
 import androidx.media3.common.util.Log;
 import androidx.media3.common.util.Util;
 import com.google.common.collect.ImmutableList;
+import java.util.ArrayDeque;
+import java.util.Queue;
 import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

@@ -60,12 +62,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   private final ImmutableList<GlMatrixTransformation> matrixTransformations;
   private final EGLDisplay eglDisplay;
   private final EGLContext eglContext;
-  private final long streamOffsetUs;
   private final DebugViewProvider debugViewProvider;
   private final FrameProcessor.Listener frameProcessorListener;
   private final boolean sampleFromExternalTexture;
   private final boolean useHdr;
   private final float[] textureTransformMatrix;
+  private final Queue<Long> streamOffsetUsQueue;

   private int inputWidth;
   private int inputHeight;
@@ -89,7 +91,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       EGLDisplay eglDisplay,
       EGLContext eglContext,
       ImmutableList<GlMatrixTransformation> matrixTransformations,
-      long streamOffsetUs,
       FrameProcessor.Listener frameProcessorListener,
       DebugViewProvider debugViewProvider,
       boolean sampleFromExternalTexture,
@@ -98,7 +99,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     this.matrixTransformations = matrixTransformations;
     this.eglDisplay = eglDisplay;
     this.eglContext = eglContext;
-    this.streamOffsetUs = streamOffsetUs;
     this.debugViewProvider = debugViewProvider;
     this.frameProcessorListener = frameProcessorListener;
     this.sampleFromExternalTexture = sampleFromExternalTexture;
@@ -106,6 +106,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

     textureTransformMatrix = new float[16];
     Matrix.setIdentityM(textureTransformMatrix, /* smOffset= */ 0);
+    streamOffsetUsQueue = new ArrayDeque<>();
   }

   /**
@@ -122,6 +123,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

   @Override
   public boolean maybeQueueInputFrame(TextureInfo inputTexture, long presentationTimeUs) {
+    checkState(!streamOffsetUsQueue.isEmpty(), "No input stream specified.");
+
     try {
       synchronized (this) {
         if (!ensureConfigured(inputTexture.width, inputTexture.height)) {
@@ -144,7 +147,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
         EGLExt.eglPresentationTimeANDROID(
             eglDisplay,
             outputEglSurface,
-            /* presentationTimeNs= */ (presentationTimeUs + streamOffsetUs) * 1000);
+            /* presentationTimeNs= */ (presentationTimeUs + streamOffsetUsQueue.element()) * 1000);
         EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
       }
     } catch (FrameProcessingException | GlUtil.GlException e) {
@@ -270,8 +273,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   }

   @Override
-  public void signalEndOfInputStream() {
-    frameProcessorListener.onFrameProcessingEnded();
+  public void signalEndOfCurrentInputStream() {
+    checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end.");
+
+    streamOffsetUsQueue.remove();
+    if (streamOffsetUsQueue.isEmpty()) {
+      frameProcessorListener.onFrameProcessingEnded();
+    }
   }

   @Override
@@ -296,6 +304,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     }
   }

+  /**
+   * Signals that there will be another input stream after all previously appended input streams
+   * have {@linkplain #signalEndOfCurrentInputStream() ended}.
+   *
+   * @param streamOffsetUs The presentation timestamp offset, in microseconds.
+   */
+  public void appendStream(long streamOffsetUs) {
+    streamOffsetUsQueue.add(streamOffsetUs);
+  }
+
   public synchronized void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
     if (!Util.areEqual(this.outputSurfaceInfo, outputSurfaceInfo)) {
       this.outputSurfaceInfo = outputSurfaceInfo;
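The queue-based bookkeeping added above can be summarized with a small standalone sketch; the class, the main method and the offset values are illustrative and not part of the commit, while the add/remove/isEmpty sequence mirrors appendStream, signalEndOfCurrentInputStream and the onFrameProcessingEnded condition in the diff.

import java.util.ArrayDeque;
import java.util.Queue;

// Illustrative only: one offset is queued per appended stream, and frame processing
// ends once the last appended stream has been signalled as ended.
public final class StreamOffsetQueueDemo {
  public static void main(String[] args) {
    Queue<Long> streamOffsetUsQueue = new ArrayDeque<>();
    streamOffsetUsQueue.add(0L);          // appendStream(0) for the first playlist item
    streamOffsetUsQueue.add(5_000_000L);  // appendStream(5_000_000) for the second item
    streamOffsetUsQueue.remove();         // signalEndOfCurrentInputStream() ends the first item
    streamOffsetUsQueue.remove();         // ... and a second call ends the second item
    // An empty queue is what triggers frameProcessorListener.onFrameProcessingEnded().
    System.out.println("frame processing ended: " + streamOffsetUsQueue.isEmpty()); // true
  }
}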
@@ -25,15 +25,33 @@ import static androidx.media3.common.util.Assertions.checkArgument;
   public final int height;
   /** The ratio of width over height for each pixel. */
   public final float pixelWidthHeightRatio;
+  /**
+   * An offset in microseconds that is part of the input timestamps and should be ignored for
+   * processing but added back to the output timestamps.
+   *
+   * <p>The offset stays constant within a stream but changes in between streams to ensure that
+   * frame timestamps are always monotonically increasing.
+   */
+  public final long streamOffsetUs;

   // TODO(b/227624622): Add color space information for HDR.

-  public FrameInfo(int width, int height, float pixelWidthHeightRatio) {
+  /**
+   * Creates a new instance.
+   *
+   * @param width The width of the frame, in pixels.
+   * @param height The height of the frame, in pixels.
+   * @param pixelWidthHeightRatio The ratio of width over height for each pixel.
+   * @param streamOffsetUs An offset in microseconds that is part of the input timestamps and should
+   *     be ignored for processing but added back to the output timestamps.
+   */
+  public FrameInfo(int width, int height, float pixelWidthHeightRatio, long streamOffsetUs) {
     checkArgument(width > 0, "width must be positive, but is: " + width);
     checkArgument(height > 0, "height must be positive, but is: " + height);

     this.width = width;
     this.height = height;
     this.pixelWidthHeightRatio = pixelWidthHeightRatio;
+    this.streamOffsetUs = streamOffsetUs;
   }
 }
@@ -58,6 +58,9 @@ import androidx.annotation.Nullable;
    *
    * <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output
    * frames' pixels have a ratio of 1.
+   *
+   * <p>The caller should update {@link FrameInfo#streamOffsetUs} when switching input streams to
+   * ensure that frame timestamps are always monotonically increasing.
    */
   void setInputFrameInfo(FrameInfo inputFrameInfo);

@@ -66,8 +69,8 @@ import androidx.annotation.Nullable;
    *
    * <p>Must be called before rendering a frame to the frame processor's input surface.
    *
-   * @throws IllegalStateException If called after {@link #signalEndOfInputStream()} or before
-   *     {@link #setInputFrameInfo(FrameInfo)}.
+   * @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link
+   *     #setInputFrameInfo(FrameInfo)}.
    */
   void registerInputFrame();

@@ -99,7 +102,7 @@ import androidx.annotation.Nullable;
    *
    * @throws IllegalStateException If called more than once.
    */
-  void signalEndOfInputStream();
+  void signalEndOfInput();

   /**
    * Releases all resources.
@@ -62,7 +62,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   public static GlEffectsFrameProcessor create(
       Context context,
       FrameProcessor.Listener listener,
-      long streamOffsetUs,
       List<GlEffect> effects,
       DebugViewProvider debugViewProvider,
       boolean useHdr)
@@ -76,7 +75,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
             createOpenGlObjectsAndFrameProcessor(
                 context,
                 listener,
-                streamOffsetUs,
                 effects,
                 debugViewProvider,
                 useHdr,
@@ -104,7 +102,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   private static GlEffectsFrameProcessor createOpenGlObjectsAndFrameProcessor(
       Context context,
       FrameProcessor.Listener listener,
-      long streamOffsetUs,
       List<GlEffect> effects,
       DebugViewProvider debugViewProvider,
       boolean useHdr,
@@ -129,14 +126,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

     ImmutableList<GlTextureProcessor> textureProcessors =
         getGlTextureProcessorsForGlEffects(
-            context,
-            effects,
-            eglDisplay,
-            eglContext,
-            streamOffsetUs,
-            listener,
-            debugViewProvider,
-            useHdr);
+            context, effects, eglDisplay, eglContext, listener, debugViewProvider, useHdr);
     FrameProcessingTaskExecutor frameProcessingTaskExecutor =
         new FrameProcessingTaskExecutor(singleThreadExecutorService, listener);
     chainTextureProcessorsWithListeners(textureProcessors, frameProcessingTaskExecutor, listener);
@@ -145,7 +135,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
         eglDisplay,
         eglContext,
         frameProcessingTaskExecutor,
-        streamOffsetUs,
         /* inputExternalTextureId= */ GlUtil.createExternalTexture(),
         textureProcessors);
   }
@@ -164,7 +153,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       List<GlEffect> effects,
       EGLDisplay eglDisplay,
       EGLContext eglContext,
-      long streamOffsetUs,
       FrameProcessor.Listener listener,
       DebugViewProvider debugViewProvider,
       boolean useHdr)
@@ -201,7 +189,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
             eglDisplay,
             eglContext,
             matrixTransformationListBuilder.build(),
-            streamOffsetUs,
             listener,
             debugViewProvider,
             sampleFromExternalTexture,
@@ -242,11 +229,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   private final EGLDisplay eglDisplay;
   private final EGLContext eglContext;
   private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
-  /**
-   * Offset compared to original media presentation time that has been added to incoming frame
-   * timestamps, in microseconds.
-   */
-  private final long streamOffsetUs;

   /** Associated with an OpenGL external texture. */
   private final SurfaceTexture inputSurfaceTexture;
@@ -266,19 +248,22 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   // Fields accessed on the frameProcessingTaskExecutor's thread.
   private boolean inputTextureInUse;
   private boolean inputStreamEnded;
+  /**
+   * Offset compared to original media presentation time that has been added to incoming frame
+   * timestamps, in microseconds.
+   */
+  private long previousStreamOffsetUs;

   private GlEffectsFrameProcessor(
       EGLDisplay eglDisplay,
       EGLContext eglContext,
       FrameProcessingTaskExecutor frameProcessingTaskExecutor,
-      long streamOffsetUs,
       int inputExternalTextureId,
       ImmutableList<GlTextureProcessor> textureProcessors) {

     this.eglDisplay = eglDisplay;
     this.eglContext = eglContext;
     this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
-    this.streamOffsetUs = streamOffsetUs;
     this.inputExternalTextureId = inputExternalTextureId;

     checkState(!textureProcessors.isEmpty());
@@ -293,6 +278,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     inputSurface = new Surface(inputSurfaceTexture);
     inputSurfaceTextureTransformMatrix = new float[16];
     pendingInputFrames = new ConcurrentLinkedQueue<>();
+    previousStreamOffsetUs = C.TIME_UNSET;
   }

   @Override
@@ -327,7 +313,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   }

   @Override
-  public void signalEndOfInputStream() {
+  public void signalEndOfInput() {
     checkState(!inputStreamEnded);
     inputStreamEnded = true;
     frameProcessingTaskExecutor.submit(this::processEndOfInputStream);
@@ -363,7 +349,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     inputTextureInUse = true;
     inputSurfaceTexture.updateTexImage();
     inputSurfaceTexture.getTransformMatrix(inputSurfaceTextureTransformMatrix);
-    queueInputFrameToTextureProcessors();
+    inputExternalTextureProcessor.setTextureTransformMatrix(inputSurfaceTextureTransformMatrix);
+    long inputFrameTimeNs = inputSurfaceTexture.getTimestamp();
+    long streamOffsetUs = checkStateNotNull(pendingInputFrames.peek()).streamOffsetUs;
+    if (streamOffsetUs != previousStreamOffsetUs) {
+      if (previousStreamOffsetUs != C.TIME_UNSET) {
+        inputExternalTextureProcessor.signalEndOfCurrentInputStream();
+      }
+      finalTextureProcessorWrapper.appendStream(streamOffsetUs);
+      previousStreamOffsetUs = streamOffsetUs;
+    }
+    // Correct for the stream offset so processors see original media presentation timestamps.
+    long presentationTimeUs = inputFrameTimeNs / 1000 - streamOffsetUs;
+    queueInputFrameToTextureProcessors(presentationTimeUs);
   }

   /**
@@ -372,14 +370,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    * <p>This method must be called on the {@linkplain #THREAD_NAME background thread}.
    */
   @WorkerThread
-  private void queueInputFrameToTextureProcessors() {
+  private void queueInputFrameToTextureProcessors(long presentationTimeUs) {
     checkState(Thread.currentThread().getName().equals(THREAD_NAME));
     checkState(inputTextureInUse);

-    long inputFrameTimeNs = inputSurfaceTexture.getTimestamp();
-    // Correct for the stream offset so processors see original media presentation timestamps.
-    long presentationTimeUs = inputFrameTimeNs / 1000 - streamOffsetUs;
-    inputExternalTextureProcessor.setTextureTransformMatrix(inputSurfaceTextureTransformMatrix);
     FrameInfo inputFrameInfo = checkStateNotNull(pendingInputFrames.peek());
     if (inputExternalTextureProcessor.maybeQueueInputFrame(
         new TextureInfo(
@@ -394,7 +388,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       // asynchronously by the texture processors chained after it.
     } else {
       // Try again later.
-      frameProcessingTaskExecutor.submit(this::queueInputFrameToTextureProcessors);
+      frameProcessingTaskExecutor.submit(
+          () -> queueInputFrameToTextureProcessors(presentationTimeUs));
     }
   }

@@ -408,12 +403,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       return new FrameInfo(
           (int) (frameInfo.width * frameInfo.pixelWidthHeightRatio),
           frameInfo.height,
-          /* pixelWidthHeightRatio= */ 1);
+          /* pixelWidthHeightRatio= */ 1,
+          frameInfo.streamOffsetUs);
     } else if (frameInfo.pixelWidthHeightRatio < 1f) {
       return new FrameInfo(
           frameInfo.width,
           (int) (frameInfo.height / frameInfo.pixelWidthHeightRatio),
-          /* pixelWidthHeightRatio= */ 1);
+          /* pixelWidthHeightRatio= */ 1,
+          frameInfo.streamOffsetUs);
     } else {
       return frameInfo;
     }
@@ -429,7 +426,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   private void processEndOfInputStream() {
     if (getPendingInputFrameCount() == 0) {
       // Propagates the end of stream signal through the chained texture processors.
-      inputExternalTextureProcessor.signalEndOfInputStream();
+      inputExternalTextureProcessor.signalEndOfCurrentInputStream();
     } else {
       frameProcessingTaskExecutor.submit(this::processEndOfInputStream);
     }
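To make the timestamp bookkeeping in the hunks above concrete, here is a small worked example; the numbers and the wrapper class are illustrative, while the two formulas mirror the subtraction on the input side and the re-addition before the frame is released for encoding.

// Illustrative numbers: the second playlist item starts 5 seconds into the output timeline.
public final class StreamOffsetMathDemo {
  public static void main(String[] args) {
    long streamOffsetUs = 5_000_000L;
    long inputFrameTimeNs = 5_040_000_000L; // timestamp of a frame arriving on the input surface
    // Effects see the item-relative time, i.e. the offset is subtracted before processing:
    long presentationTimeUs = inputFrameTimeNs / 1000 - streamOffsetUs; // 40_000
    // The offset is added back when the frame is released towards the encoder:
    long outputPresentationTimeNs = (presentationTimeUs + streamOffsetUs) * 1000; // 5_040_000_000
    System.out.println(presentationTimeUs + " " + outputPresentationTimeNs);
  }
}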
@@ -72,8 +72,11 @@ public interface GlTextureProcessor {
      */
     void onOutputFrameAvailable(TextureInfo outputTexture, long presentationTimeUs);

-    /** Called when the {@link GlTextureProcessor} will not produce further output frames. */
-    void onOutputStreamEnded();
+    /**
+     * Called when the {@link GlTextureProcessor} will not produce further output frames belonging
+     * to the current output stream.
+     */
+    void onCurrentOutputStreamEnded();

     /**
      * Called when an exception occurs during asynchronous frame processing.
@@ -110,8 +113,15 @@ public interface GlTextureProcessor {
    */
   void releaseOutputFrame(TextureInfo outputTexture);

-  /** Notifies the texture processor that no further input frames will become available. */
-  void signalEndOfInputStream();
+  /**
+   * Notifies the {@code GlTextureProcessor} that no further input frames belonging to the current
+   * input stream will be queued.
+   *
+   * <p>Input frames that are queued after this method is called belong to a different input stream,
+   * so presentation timestamps may reset to start from a smaller presentation timestamp than the
+   * last frame of the previous input stream.
+   */
+  void signalEndOfCurrentInputStream();

   /**
    * Releases all resources.
@@ -142,9 +142,9 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcesso
   }

   @Override
-  public final void signalEndOfInputStream() {
+  public final void signalEndOfCurrentInputStream() {
     if (listener != null) {
-      listener.onOutputStreamEnded();
+      listener.onCurrentOutputStreamEnded();
     }
   }

@@ -131,7 +131,6 @@ import org.checkerframework.dataflow.qual.Pure;
                 }
               }
             },
-            streamOffsetUs,
             effectsListBuilder.build(),
             debugViewProvider,
             // HDR is only used if the MediaCodec encoder supports FEATURE_HdrEditing. This
@@ -143,7 +142,8 @@ import org.checkerframework.dataflow.qual.Pure;
           e, TransformationException.ERROR_CODE_GL_INIT_FAILED);
     }
     frameProcessor.setInputFrameInfo(
-        new FrameInfo(decodedWidth, decodedHeight, inputFormat.pixelWidthHeightRatio));
+        new FrameInfo(
+            decodedWidth, decodedHeight, inputFormat.pixelWidthHeightRatio, streamOffsetUs));

     boolean isToneMappingRequired =
         ColorInfo.isHdr(inputFormat.colorInfo) && !encoderWrapper.isHdrEditingEnabled();
@@ -178,7 +178,7 @@ import org.checkerframework.dataflow.qual.Pure;
       processedData = true;
     }
     if (decoder.isEnded()) {
-      frameProcessor.signalEndOfInputStream();
+      frameProcessor.signalEndOfInput();
     }
     // If the decoder produced output, signal that it may be possible to process data again.
     return processedData;
@@ -125,10 +125,10 @@ public final class ChainingGlTextureProcessorListenerTest {
   @Test
   public void onOutputStreamEnded_signalsInputStreamEndedToNextGlTextureProcessor()
       throws InterruptedException {
-    chainingGlTextureProcessorListener.onOutputStreamEnded();
+    chainingGlTextureProcessorListener.onCurrentOutputStreamEnded();
     Thread.sleep(EXECUTOR_WAIT_TIME_MS);

-    verify(fakeNextGlTextureProcessor, times(1)).signalEndOfInputStream();
+    verify(fakeNextGlTextureProcessor, times(1)).signalEndOfCurrentInputStream();
   }

   private static class FakeGlTextureProcessor implements GlTextureProcessor {
@@ -155,7 +155,7 @@ public final class ChainingGlTextureProcessorListenerTest {
     public void releaseOutputFrame(TextureInfo outputTexture) {}

     @Override
-    public void signalEndOfInputStream() {}
+    public void signalEndOfCurrentInputStream() {}

     @Override
     public void release() {}