From 30e5bc9837e2423cd2bb426c5797211e0f6ad76b Mon Sep 17 00:00:00 2001
From: hschlueter
Date: Fri, 22 Jul 2022 22:18:55 +0000
Subject: [PATCH] Allow stream offset passed to GlEffectsFrameProcessor to change.

This is needed for applying effects to a playlist. The effects are
applied based on the presentation time of the frame in its
corresponding media item, and the offset is added back before encoding.

Each time the offset changes, end of input stream is signalled to the
texture processors. This is needed because the texture processors may
assume monotonically increasing timestamps within the same input
stream, but when the offset changes, the timestamps jump back to 0.

PiperOrigin-RevId: 462714966
---
 .../demo/transformer/MediaPipeProcessor.java  |  6 +-
 .../GlEffectsFrameProcessorPixelTest.java     |  5 +-
 .../ChainingGlTextureProcessorListener.java   |  4 +-
 ...lMatrixTransformationProcessorWrapper.java | 30 +++++++--
 .../media3/transformer/FrameInfo.java         | 20 +++++-
 .../media3/transformer/FrameProcessor.java    |  9 ++-
 .../transformer/GlEffectsFrameProcessor.java  | 61 +++++++++----------
 .../transformer/GlTextureProcessor.java       | 18 ++++--
 .../SingleFrameGlTextureProcessor.java        |  4 +-
 .../VideoTranscodingSamplePipeline.java       |  6 +-
 ...hainingGlTextureProcessorListenerTest.java |  6 +-
 11 files changed, 108 insertions(+), 61 deletions(-)

diff --git a/demos/transformer/src/withMediaPipe/java/androidx/media3/demo/transformer/MediaPipeProcessor.java b/demos/transformer/src/withMediaPipe/java/androidx/media3/demo/transformer/MediaPipeProcessor.java index 538e377c56..48e2764daf 100644 --- a/demos/transformer/src/withMediaPipe/java/androidx/media3/demo/transformer/MediaPipeProcessor.java +++ b/demos/transformer/src/withMediaPipe/java/androidx/media3/demo/transformer/MediaPipeProcessor.java @@ -133,6 +133,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; acceptedFrame = false; AppTextureFrame appTextureFrame = new AppTextureFrame(inputTexture.texId, inputTexture.width, inputTexture.height); + // TODO(b/238302213): Handle timestamps restarting from 0 when applying effects to a playlist. + // MediaPipe will fail if the timestamps are not monotonically increasing. 
appTextureFrame.setTimestamp(presentationTimeUs); checkStateNotNull(frameProcessor).onNewFrame(appTextureFrame); try { @@ -167,10 +169,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; } @Override - public final void signalEndOfInputStream() { + public final void signalEndOfCurrentInputStream() { frameProcessor.waitUntilIdle(); if (listener != null) { - listener.onOutputStreamEnded(); + listener.onCurrentOutputStreamEnded(); } } diff --git a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/GlEffectsFrameProcessorPixelTest.java b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/GlEffectsFrameProcessorPixelTest.java index 28faa35dff..8298a7c5cb 100644 --- a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/GlEffectsFrameProcessorPixelTest.java +++ b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/GlEffectsFrameProcessorPixelTest.java @@ -379,12 +379,11 @@ public final class GlEffectsFrameProcessorPixelTest { frameProcessingEnded = true; } }, - /* streamOffsetUs= */ 0L, effects, DebugViewProvider.NONE, /* useHdr= */ false)); glEffectsFrameProcessor.setInputFrameInfo( - new FrameInfo(inputWidth, inputHeight, pixelWidthHeightRatio)); + new FrameInfo(inputWidth, inputHeight, pixelWidthHeightRatio, /* streamOffsetUs= */ 0)); glEffectsFrameProcessor.registerInputFrame(); // Queue the first video frame from the extractor. @@ -435,7 +434,7 @@ public final class GlEffectsFrameProcessorPixelTest { } private Bitmap processFirstFrameAndEnd() throws InterruptedException { - checkNotNull(glEffectsFrameProcessor).signalEndOfInputStream(); + checkNotNull(glEffectsFrameProcessor).signalEndOfInput(); Thread.sleep(FRAME_PROCESSING_WAIT_MS); assertThat(frameProcessingEnded).isTrue(); assertThat(frameProcessingException.get()).isNull(); diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/ChainingGlTextureProcessorListener.java b/libraries/transformer/src/main/java/androidx/media3/transformer/ChainingGlTextureProcessorListener.java index db4120ea36..da2ace4cae 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/ChainingGlTextureProcessorListener.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/ChainingGlTextureProcessorListener.java @@ -92,9 +92,9 @@ import java.util.Queue; } @Override - public void onOutputStreamEnded() { + public void onCurrentOutputStreamEnded() { if (nextGlTextureProcessor != null) { - frameProcessingTaskExecutor.submit(nextGlTextureProcessor::signalEndOfInputStream); + frameProcessingTaskExecutor.submit(nextGlTextureProcessor::signalEndOfCurrentInputStream); } } diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/FinalMatrixTransformationProcessorWrapper.java b/libraries/transformer/src/main/java/androidx/media3/transformer/FinalMatrixTransformationProcessorWrapper.java index 3a844df7a2..1c862ed4f2 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/FinalMatrixTransformationProcessorWrapper.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/FinalMatrixTransformationProcessorWrapper.java @@ -37,6 +37,8 @@ import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.Log; import androidx.media3.common.util.Util; import com.google.common.collect.ImmutableList; +import java.util.ArrayDeque; +import java.util.Queue; import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; import 
org.checkerframework.checker.nullness.qual.MonotonicNonNull; @@ -60,12 +62,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; private final ImmutableList matrixTransformations; private final EGLDisplay eglDisplay; private final EGLContext eglContext; - private final long streamOffsetUs; private final DebugViewProvider debugViewProvider; private final FrameProcessor.Listener frameProcessorListener; private final boolean sampleFromExternalTexture; private final boolean useHdr; private final float[] textureTransformMatrix; + private final Queue streamOffsetUsQueue; private int inputWidth; private int inputHeight; @@ -89,7 +91,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; EGLDisplay eglDisplay, EGLContext eglContext, ImmutableList matrixTransformations, - long streamOffsetUs, FrameProcessor.Listener frameProcessorListener, DebugViewProvider debugViewProvider, boolean sampleFromExternalTexture, @@ -98,7 +99,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; this.matrixTransformations = matrixTransformations; this.eglDisplay = eglDisplay; this.eglContext = eglContext; - this.streamOffsetUs = streamOffsetUs; this.debugViewProvider = debugViewProvider; this.frameProcessorListener = frameProcessorListener; this.sampleFromExternalTexture = sampleFromExternalTexture; @@ -106,6 +106,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; textureTransformMatrix = new float[16]; Matrix.setIdentityM(textureTransformMatrix, /* smOffset= */ 0); + streamOffsetUsQueue = new ArrayDeque<>(); } /** @@ -122,6 +123,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; @Override public boolean maybeQueueInputFrame(TextureInfo inputTexture, long presentationTimeUs) { + checkState(!streamOffsetUsQueue.isEmpty(), "No input stream specified."); + try { synchronized (this) { if (!ensureConfigured(inputTexture.width, inputTexture.height)) { @@ -144,7 +147,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; EGLExt.eglPresentationTimeANDROID( eglDisplay, outputEglSurface, - /* presentationTimeNs= */ (presentationTimeUs + streamOffsetUs) * 1000); + /* presentationTimeNs= */ (presentationTimeUs + streamOffsetUsQueue.element()) * 1000); EGL14.eglSwapBuffers(eglDisplay, outputEglSurface); } } catch (FrameProcessingException | GlUtil.GlException e) { @@ -270,8 +273,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; } @Override - public void signalEndOfInputStream() { - frameProcessorListener.onFrameProcessingEnded(); + public void signalEndOfCurrentInputStream() { + checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end."); + + streamOffsetUsQueue.remove(); + if (streamOffsetUsQueue.isEmpty()) { + frameProcessorListener.onFrameProcessingEnded(); + } } @Override @@ -296,6 +304,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; } } + /** + * Signals that there will be another input stream after all previously appended input streams + * have {@linkplain #signalEndOfCurrentInputStream() ended}. + * + * @param streamOffsetUs The presentation timestamp offset, in microseconds. 
+ */ + public void appendStream(long streamOffsetUs) { + streamOffsetUsQueue.add(streamOffsetUs); + } + public synchronized void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) { if (!Util.areEqual(this.outputSurfaceInfo, outputSurfaceInfo)) { this.outputSurfaceInfo = outputSurfaceInfo; diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/FrameInfo.java b/libraries/transformer/src/main/java/androidx/media3/transformer/FrameInfo.java index 8975f93383..d35c12fa0c 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/FrameInfo.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/FrameInfo.java @@ -25,15 +25,33 @@ import static androidx.media3.common.util.Assertions.checkArgument; public final int height; /** The ratio of width over height for each pixel. */ public final float pixelWidthHeightRatio; + /** + * An offset in microseconds that is part of the input timestamps and should be ignored for + * processing but added back to the output timestamps. + * + *
<p>
The offset stays constant within a stream but changes in between streams to ensure that + * frame timestamps are always monotonically increasing. + */ + public final long streamOffsetUs; // TODO(b/227624622): Add color space information for HDR. - public FrameInfo(int width, int height, float pixelWidthHeightRatio) { + /** + * Creates a new instance. + * + * @param width The width of the frame, in pixels. + * @param height The height of the frame, in pixels. + * @param pixelWidthHeightRatio The ratio of width over height for each pixel. + * @param streamOffsetUs An offset in microseconds that is part of the input timestamps and should + * be ignored for processing but added back to the output timestamps. + */ + public FrameInfo(int width, int height, float pixelWidthHeightRatio, long streamOffsetUs) { checkArgument(width > 0, "width must be positive, but is: " + width); checkArgument(height > 0, "height must be positive, but is: " + height); this.width = width; this.height = height; this.pixelWidthHeightRatio = pixelWidthHeightRatio; + this.streamOffsetUs = streamOffsetUs; } } diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/FrameProcessor.java b/libraries/transformer/src/main/java/androidx/media3/transformer/FrameProcessor.java index 01802199d1..45c1835def 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/FrameProcessor.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/FrameProcessor.java @@ -58,6 +58,9 @@ import androidx.annotation.Nullable; * *
<p>
Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output * frames' pixels have a ratio of 1. + * + *
<p>
The caller should update {@link FrameInfo#streamOffsetUs} when switching input streams to + * ensure that frame timestamps are always monotonically increasing. */ void setInputFrameInfo(FrameInfo inputFrameInfo); @@ -66,8 +69,8 @@ import androidx.annotation.Nullable; * *
<p>
Must be called before rendering a frame to the frame processor's input surface. * - * @throws IllegalStateException If called after {@link #signalEndOfInputStream()} or before - * {@link #setInputFrameInfo(FrameInfo)}. + * @throws IllegalStateException If called after {@link #signalEndOfInput()} or before {@link + * #setInputFrameInfo(FrameInfo)}. */ void registerInputFrame(); @@ -99,7 +102,7 @@ import androidx.annotation.Nullable; * * @throws IllegalStateException If called more than once. */ - void signalEndOfInputStream(); + void signalEndOfInput(); /** * Releases all resources. diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/GlEffectsFrameProcessor.java b/libraries/transformer/src/main/java/androidx/media3/transformer/GlEffectsFrameProcessor.java index eed56123ab..c3f582291c 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/GlEffectsFrameProcessor.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/GlEffectsFrameProcessor.java @@ -62,7 +62,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; public static GlEffectsFrameProcessor create( Context context, FrameProcessor.Listener listener, - long streamOffsetUs, List effects, DebugViewProvider debugViewProvider, boolean useHdr) @@ -76,7 +75,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; createOpenGlObjectsAndFrameProcessor( context, listener, - streamOffsetUs, effects, debugViewProvider, useHdr, @@ -104,7 +102,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; private static GlEffectsFrameProcessor createOpenGlObjectsAndFrameProcessor( Context context, FrameProcessor.Listener listener, - long streamOffsetUs, List effects, DebugViewProvider debugViewProvider, boolean useHdr, @@ -129,14 +126,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ImmutableList textureProcessors = getGlTextureProcessorsForGlEffects( - context, - effects, - eglDisplay, - eglContext, - streamOffsetUs, - listener, - debugViewProvider, - useHdr); + context, effects, eglDisplay, eglContext, listener, debugViewProvider, useHdr); FrameProcessingTaskExecutor frameProcessingTaskExecutor = new FrameProcessingTaskExecutor(singleThreadExecutorService, listener); chainTextureProcessorsWithListeners(textureProcessors, frameProcessingTaskExecutor, listener); @@ -145,7 +135,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; eglDisplay, eglContext, frameProcessingTaskExecutor, - streamOffsetUs, /* inputExternalTextureId= */ GlUtil.createExternalTexture(), textureProcessors); } @@ -164,7 +153,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; List effects, EGLDisplay eglDisplay, EGLContext eglContext, - long streamOffsetUs, FrameProcessor.Listener listener, DebugViewProvider debugViewProvider, boolean useHdr) @@ -201,7 +189,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; eglDisplay, eglContext, matrixTransformationListBuilder.build(), - streamOffsetUs, listener, debugViewProvider, sampleFromExternalTexture, @@ -242,11 +229,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; private final EGLDisplay eglDisplay; private final EGLContext eglContext; private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; - /** - * Offset compared to original media presentation time that has been added to incoming frame - * timestamps, in microseconds. 
- */ - private final long streamOffsetUs; /** Associated with an OpenGL external texture. */ private final SurfaceTexture inputSurfaceTexture; @@ -266,19 +248,22 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; // Fields accessed on the frameProcessingTaskExecutor's thread. private boolean inputTextureInUse; private boolean inputStreamEnded; + /** + * Offset compared to original media presentation time that has been added to incoming frame + * timestamps, in microseconds. + */ + private long previousStreamOffsetUs; private GlEffectsFrameProcessor( EGLDisplay eglDisplay, EGLContext eglContext, FrameProcessingTaskExecutor frameProcessingTaskExecutor, - long streamOffsetUs, int inputExternalTextureId, ImmutableList textureProcessors) { this.eglDisplay = eglDisplay; this.eglContext = eglContext; this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; - this.streamOffsetUs = streamOffsetUs; this.inputExternalTextureId = inputExternalTextureId; checkState(!textureProcessors.isEmpty()); @@ -293,6 +278,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; inputSurface = new Surface(inputSurfaceTexture); inputSurfaceTextureTransformMatrix = new float[16]; pendingInputFrames = new ConcurrentLinkedQueue<>(); + previousStreamOffsetUs = C.TIME_UNSET; } @Override @@ -327,7 +313,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; } @Override - public void signalEndOfInputStream() { + public void signalEndOfInput() { checkState(!inputStreamEnded); inputStreamEnded = true; frameProcessingTaskExecutor.submit(this::processEndOfInputStream); @@ -363,7 +349,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; inputTextureInUse = true; inputSurfaceTexture.updateTexImage(); inputSurfaceTexture.getTransformMatrix(inputSurfaceTextureTransformMatrix); - queueInputFrameToTextureProcessors(); + inputExternalTextureProcessor.setTextureTransformMatrix(inputSurfaceTextureTransformMatrix); + long inputFrameTimeNs = inputSurfaceTexture.getTimestamp(); + long streamOffsetUs = checkStateNotNull(pendingInputFrames.peek()).streamOffsetUs; + if (streamOffsetUs != previousStreamOffsetUs) { + if (previousStreamOffsetUs != C.TIME_UNSET) { + inputExternalTextureProcessor.signalEndOfCurrentInputStream(); + } + finalTextureProcessorWrapper.appendStream(streamOffsetUs); + previousStreamOffsetUs = streamOffsetUs; + } + // Correct for the stream offset so processors see original media presentation timestamps. + long presentationTimeUs = inputFrameTimeNs / 1000 - streamOffsetUs; + queueInputFrameToTextureProcessors(presentationTimeUs); } /** @@ -372,14 +370,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; *
<p>
This method must be called on the {@linkplain #THREAD_NAME background thread}. */ @WorkerThread - private void queueInputFrameToTextureProcessors() { + private void queueInputFrameToTextureProcessors(long presentationTimeUs) { checkState(Thread.currentThread().getName().equals(THREAD_NAME)); checkState(inputTextureInUse); - long inputFrameTimeNs = inputSurfaceTexture.getTimestamp(); - // Correct for the stream offset so processors see original media presentation timestamps. - long presentationTimeUs = inputFrameTimeNs / 1000 - streamOffsetUs; - inputExternalTextureProcessor.setTextureTransformMatrix(inputSurfaceTextureTransformMatrix); FrameInfo inputFrameInfo = checkStateNotNull(pendingInputFrames.peek()); if (inputExternalTextureProcessor.maybeQueueInputFrame( new TextureInfo( @@ -394,7 +388,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; // asynchronously by the texture processors chained after it. } else { // Try again later. - frameProcessingTaskExecutor.submit(this::queueInputFrameToTextureProcessors); + frameProcessingTaskExecutor.submit( + () -> queueInputFrameToTextureProcessors(presentationTimeUs)); } } @@ -408,12 +403,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; return new FrameInfo( (int) (frameInfo.width * frameInfo.pixelWidthHeightRatio), frameInfo.height, - /* pixelWidthHeightRatio= */ 1); + /* pixelWidthHeightRatio= */ 1, + frameInfo.streamOffsetUs); } else if (frameInfo.pixelWidthHeightRatio < 1f) { return new FrameInfo( frameInfo.width, (int) (frameInfo.height / frameInfo.pixelWidthHeightRatio), - /* pixelWidthHeightRatio= */ 1); + /* pixelWidthHeightRatio= */ 1, + frameInfo.streamOffsetUs); } else { return frameInfo; } @@ -429,7 +426,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; private void processEndOfInputStream() { if (getPendingInputFrameCount() == 0) { // Propagates the end of stream signal through the chained texture processors. - inputExternalTextureProcessor.signalEndOfInputStream(); + inputExternalTextureProcessor.signalEndOfCurrentInputStream(); } else { frameProcessingTaskExecutor.submit(this::processEndOfInputStream); } diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/GlTextureProcessor.java b/libraries/transformer/src/main/java/androidx/media3/transformer/GlTextureProcessor.java index 6021ed1be4..55cba646bd 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/GlTextureProcessor.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/GlTextureProcessor.java @@ -72,8 +72,11 @@ public interface GlTextureProcessor { */ void onOutputFrameAvailable(TextureInfo outputTexture, long presentationTimeUs); - /** Called when the {@link GlTextureProcessor} will not produce further output frames. */ - void onOutputStreamEnded(); + /** + * Called when the {@link GlTextureProcessor} will not produce further output frames belonging + * to the current output stream. + */ + void onCurrentOutputStreamEnded(); /** * Called when an exception occurs during asynchronous frame processing. @@ -110,8 +113,15 @@ public interface GlTextureProcessor { */ void releaseOutputFrame(TextureInfo outputTexture); - /** Notifies the texture processor that no further input frames will become available. */ - void signalEndOfInputStream(); + /** + * Notifies the {@code GlTextureProcessor} that no further input frames belonging to the current + * input stream will be queued. + * + *
<p>
Input frames that are queued after this method is called belong to a different input stream, + * so presentation timestamps may reset to start from a smaller presentation timestamp than the + * last frame of the previous input stream. + */ + void signalEndOfCurrentInputStream(); /** * Releases all resources. diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/SingleFrameGlTextureProcessor.java b/libraries/transformer/src/main/java/androidx/media3/transformer/SingleFrameGlTextureProcessor.java index 4b2624c8f8..ae3f3953e2 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/SingleFrameGlTextureProcessor.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/SingleFrameGlTextureProcessor.java @@ -142,9 +142,9 @@ public abstract class SingleFrameGlTextureProcessor implements GlTextureProcesso } @Override - public final void signalEndOfInputStream() { + public final void signalEndOfCurrentInputStream() { if (listener != null) { - listener.onOutputStreamEnded(); + listener.onCurrentOutputStreamEnded(); } } diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoTranscodingSamplePipeline.java b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoTranscodingSamplePipeline.java index 89831de8a4..4f2344c1af 100644 --- a/libraries/transformer/src/main/java/androidx/media3/transformer/VideoTranscodingSamplePipeline.java +++ b/libraries/transformer/src/main/java/androidx/media3/transformer/VideoTranscodingSamplePipeline.java @@ -131,7 +131,6 @@ import org.checkerframework.dataflow.qual.Pure; } } }, - streamOffsetUs, effectsListBuilder.build(), debugViewProvider, // HDR is only used if the MediaCodec encoder supports FEATURE_HdrEditing. This @@ -143,7 +142,8 @@ import org.checkerframework.dataflow.qual.Pure; e, TransformationException.ERROR_CODE_GL_INIT_FAILED); } frameProcessor.setInputFrameInfo( - new FrameInfo(decodedWidth, decodedHeight, inputFormat.pixelWidthHeightRatio)); + new FrameInfo( + decodedWidth, decodedHeight, inputFormat.pixelWidthHeightRatio, streamOffsetUs)); boolean isToneMappingRequired = ColorInfo.isHdr(inputFormat.colorInfo) && !encoderWrapper.isHdrEditingEnabled(); @@ -178,7 +178,7 @@ import org.checkerframework.dataflow.qual.Pure; processedData = true; } if (decoder.isEnded()) { - frameProcessor.signalEndOfInputStream(); + frameProcessor.signalEndOfInput(); } // If the decoder produced output, signal that it may be possible to process data again. 
return processedData; diff --git a/libraries/transformer/src/test/java/androidx/media3/transformer/ChainingGlTextureProcessorListenerTest.java b/libraries/transformer/src/test/java/androidx/media3/transformer/ChainingGlTextureProcessorListenerTest.java index c48ca303bf..5f141cd0ae 100644 --- a/libraries/transformer/src/test/java/androidx/media3/transformer/ChainingGlTextureProcessorListenerTest.java +++ b/libraries/transformer/src/test/java/androidx/media3/transformer/ChainingGlTextureProcessorListenerTest.java @@ -125,10 +125,10 @@ public final class ChainingGlTextureProcessorListenerTest { @Test public void onOutputStreamEnded_signalsInputStreamEndedToNextGlTextureProcessor() throws InterruptedException { - chainingGlTextureProcessorListener.onOutputStreamEnded(); + chainingGlTextureProcessorListener.onCurrentOutputStreamEnded(); Thread.sleep(EXECUTOR_WAIT_TIME_MS); - verify(fakeNextGlTextureProcessor, times(1)).signalEndOfInputStream(); + verify(fakeNextGlTextureProcessor, times(1)).signalEndOfCurrentInputStream(); } private static class FakeGlTextureProcessor implements GlTextureProcessor { @@ -155,7 +155,7 @@ public final class ChainingGlTextureProcessorListenerTest { public void releaseOutputFrame(TextureInfo outputTexture) {} @Override - public void signalEndOfInputStream() {} + public void signalEndOfCurrentInputStream() {} @Override public void release() {}
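
Usage note (appended for review, not part of the patch): the sketch below shows how a caller might drive the changed API across a two-item playlist. GlEffectsFrameProcessor.create, FrameInfo, setInputFrameInfo, registerInputFrame, signalEndOfInput and DebugViewProvider.NONE are the names introduced or kept by this patch; the wrapping method, the frame counts, frame sizes and offset values are illustrative assumptions, and imports of the androidx.media3.transformer classes touched by the patch are assumed.

// Minimal sketch, assuming the classes touched by this patch are imported.
void processPlaylist(
    Context context,
    FrameProcessor.Listener listener,
    List<GlEffect> effects,
    long firstItemDurationUs,
    int firstItemFrameCount,
    int secondItemFrameCount)
    throws FrameProcessingException {
  GlEffectsFrameProcessor frameProcessor =
      GlEffectsFrameProcessor.create(
          context, listener, effects, DebugViewProvider.NONE, /* useHdr= */ false);

  // First media item: its decoded frames carry a stream offset of 0.
  frameProcessor.setInputFrameInfo(
      new FrameInfo(
          /* width= */ 1280, /* height= */ 720, /* pixelWidthHeightRatio= */ 1f,
          /* streamOffsetUs= */ 0));
  for (int i = 0; i < firstItemFrameCount; i++) {
    frameProcessor.registerInputFrame();
    // ... render the decoded frame to the frame processor's input surface ...
  }

  // Second media item: its decoded timestamps start at a new offset. Passing the new offset in
  // FrameInfo lets the texture processors keep seeing per-item presentation times while the
  // offset is added back to the output timestamps before encoding.
  frameProcessor.setInputFrameInfo(
      new FrameInfo(
          /* width= */ 1920, /* height= */ 1080, /* pixelWidthHeightRatio= */ 1f,
          /* streamOffsetUs= */ firstItemDurationUs));
  for (int i = 0; i < secondItemFrameCount; i++) {
    frameProcessor.registerInputFrame();
    // ... render the decoded frame to the frame processor's input surface ...
  }

  // Called once for the whole input (hence the rename from signalEndOfInputStream()); the end of
  // each individual input stream is signalled internally whenever the offset changes.
  frameProcessor.signalEndOfInput();
}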
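
To see the new offset bookkeeping in one place, the simplified model below mirrors what GlEffectsFrameProcessor.processFrame and FinalMatrixTransformationProcessorWrapper now do. The class and method names here are invented for the illustration; only the arithmetic and the queue discipline come from the patch.

import java.util.ArrayDeque;
import java.util.Queue;

// Simplified, self-contained model of the stream-offset handling introduced by this patch.
final class StreamOffsetBookkeeping {
  private static final long TIME_UNSET = Long.MIN_VALUE; // stand-in for C.TIME_UNSET

  private final Queue<Long> streamOffsetUsQueue = new ArrayDeque<>();
  private long previousStreamOffsetUs = TIME_UNSET;

  // Mirrors GlEffectsFrameProcessor.processFrame: strips the offset from an incoming timestamp so
  // that the texture processors see the presentation time within the current media item. In the
  // real code, a change of offset also signals end of the current input stream to the processors.
  long toProcessingTimeUs(long inputTimeUs, long streamOffsetUs) {
    if (streamOffsetUs != previousStreamOffsetUs) {
      streamOffsetUsQueue.add(streamOffsetUs); // corresponds to appendStream(streamOffsetUs)
      previousStreamOffsetUs = streamOffsetUs;
    }
    return inputTimeUs - streamOffsetUs;
  }

  // Mirrors FinalMatrixTransformationProcessorWrapper: the offset of the stream currently being
  // output is added back before the frame is released for encoding.
  long toOutputTimeUs(long processingTimeUs) {
    return processingTimeUs + streamOffsetUsQueue.element();
  }

  // Mirrors signalEndOfCurrentInputStream in the wrapper: frame processing is only reported as
  // ended once every appended stream has ended.
  boolean onCurrentStreamEnded() {
    streamOffsetUsQueue.remove();
    return streamOffsetUsQueue.isEmpty();
  }
}

Keeping a queue of offsets, rather than a single field, is presumably what lets the output side keep stamping frames of an earlier stream correctly even after later streams' offsets have already been appended, since the streams end asynchronously.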