Add a class wrapping GlTextureInfo and presentation time

The grouping is already used in several places and will also be used in the upcoming dynamic effect update.

PiperOrigin-RevId: 729569353
This commit is contained in:
claincly 2025-02-21 10:08:59 -08:00 committed by Copybara-Service
parent 7c2e8c1c4b
commit 4c3ac81873
6 changed files with 87 additions and 67 deletions

View File

@ -201,8 +201,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
InputFrameInfo inputFrameInfo = InputFrameInfo inputFrameInfo =
new InputFrameInfo( new InputFrameInfo(
textureProducer, textureProducer,
inputTexture, new TimedGlTextureInfo(inputTexture, presentationTimeUs),
presentationTimeUs,
settings.getOverlaySettings(inputIndex, presentationTimeUs)); settings.getOverlaySettings(inputIndex, presentationTimeUs));
inputSource.frameInfos.add(inputFrameInfo); inputSource.frameInfos.add(inputFrameInfo);
@ -260,13 +259,15 @@ public final class DefaultVideoCompositor implements VideoCompositor {
// nextTimestampToComposite. // nextTimestampToComposite.
@Nullable InputFrameInfo nextPrimaryFrame = primaryInputSource.frameInfos.peek(); @Nullable InputFrameInfo nextPrimaryFrame = primaryInputSource.frameInfos.peek();
long nextTimestampToComposite = long nextTimestampToComposite =
nextPrimaryFrame != null ? nextPrimaryFrame.presentationTimeUs : C.TIME_UNSET; nextPrimaryFrame != null
? nextPrimaryFrame.timedGlTextureInfo.presentationTimeUs
: C.TIME_UNSET;
int numberOfSecondaryFramesBeforeOrAtNextTargetTimestamp = int numberOfSecondaryFramesBeforeOrAtNextTargetTimestamp =
Iterables.size( Iterables.size(
Iterables.filter( Iterables.filter(
secondaryInputSource.frameInfos, secondaryInputSource.frameInfos,
frame -> frame.presentationTimeUs <= nextTimestampToComposite)); frame -> frame.timedGlTextureInfo.presentationTimeUs <= nextTimestampToComposite));
releaseFrames( releaseFrames(
secondaryInputSource, secondaryInputSource,
/* numberOfFramesToRelease= */ max( /* numberOfFramesToRelease= */ max(
@ -277,7 +278,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
for (int i = 0; i < numberOfFramesToRelease; i++) { for (int i = 0; i < numberOfFramesToRelease; i++) {
InputFrameInfo frameInfoToRelease = inputSource.frameInfos.remove(); InputFrameInfo frameInfoToRelease = inputSource.frameInfos.remove();
frameInfoToRelease.textureProducer.releaseOutputTexture( frameInfoToRelease.textureProducer.releaseOutputTexture(
frameInfoToRelease.presentationTimeUs); frameInfoToRelease.timedGlTextureInfo.presentationTimeUs);
} }
} }
@ -302,7 +303,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>(); ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>();
for (int i = 0; i < framesToComposite.size(); i++) { for (int i = 0; i < framesToComposite.size(); i++) {
GlTextureInfo texture = framesToComposite.get(i).texture; GlTextureInfo texture = framesToComposite.get(i).timedGlTextureInfo.glTextureInfo;
inputSizes.add(new Size(texture.width, texture.height)); inputSizes.add(new Size(texture.width, texture.height));
} }
Size outputSize = settings.getOutputSize(inputSizes.build()); Size outputSize = settings.getOutputSize(inputSizes.build());
@ -310,7 +311,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
glObjectsProvider, outputSize.getWidth(), outputSize.getHeight()); glObjectsProvider, outputSize.getWidth(), outputSize.getHeight());
GlTextureInfo outputTexture = outputTexturePool.useTexture(); GlTextureInfo outputTexture = outputTexturePool.useTexture();
long outputPresentationTimestampUs = primaryInputFrame.presentationTimeUs; long outputPresentationTimestampUs = primaryInputFrame.timedGlTextureInfo.presentationTimeUs;
outputTextureTimestamps.add(outputPresentationTimestampUs); outputTextureTimestamps.add(outputPresentationTimestampUs);
compositorGlProgram.drawFrame(framesToComposite, outputTexture); compositorGlProgram.drawFrame(framesToComposite, outputTexture);
@ -369,16 +370,18 @@ public final class DefaultVideoCompositor implements VideoCompositor {
Iterator<InputFrameInfo> frameInfosIterator = secondaryInputSource.frameInfos.iterator(); Iterator<InputFrameInfo> frameInfosIterator = secondaryInputSource.frameInfos.iterator();
while (frameInfosIterator.hasNext()) { while (frameInfosIterator.hasNext()) {
InputFrameInfo candidateFrame = frameInfosIterator.next(); InputFrameInfo candidateFrame = frameInfosIterator.next();
long candidateTimestampUs = candidateFrame.presentationTimeUs; long candidateTimestampUs = candidateFrame.timedGlTextureInfo.presentationTimeUs;
long candidateAbsDistance = long candidateAbsDistance =
abs(candidateTimestampUs - primaryFrameToComposite.presentationTimeUs); abs(
candidateTimestampUs
- primaryFrameToComposite.timedGlTextureInfo.presentationTimeUs);
if (candidateAbsDistance < minTimeDiffFromPrimaryUs) { if (candidateAbsDistance < minTimeDiffFromPrimaryUs) {
minTimeDiffFromPrimaryUs = candidateAbsDistance; minTimeDiffFromPrimaryUs = candidateAbsDistance;
secondaryFrameToComposite = candidateFrame; secondaryFrameToComposite = candidateFrame;
} }
if (candidateTimestampUs > primaryFrameToComposite.presentationTimeUs if (candidateTimestampUs > primaryFrameToComposite.timedGlTextureInfo.presentationTimeUs
|| (!frameInfosIterator.hasNext() && secondaryInputSource.isInputEnded)) { || (!frameInfosIterator.hasNext() && secondaryInputSource.isInputEnded)) {
framesToComposite.add(checkNotNull(secondaryFrameToComposite)); framesToComposite.add(checkNotNull(secondaryFrameToComposite));
break; break;
@ -503,7 +506,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
private void blendOntoFocusedTexture(InputFrameInfo inputFrameInfo) throws GlUtil.GlException { private void blendOntoFocusedTexture(InputFrameInfo inputFrameInfo) throws GlUtil.GlException {
GlProgram glProgram = checkNotNull(this.glProgram); GlProgram glProgram = checkNotNull(this.glProgram);
GlTextureInfo inputTexture = inputFrameInfo.texture; GlTextureInfo inputTexture = inputFrameInfo.timedGlTextureInfo.glTextureInfo;
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexture.texId, /* texUnitIndex= */ 0); glProgram.setSamplerTexIdUniform("uTexSampler", inputTexture.texId, /* texUnitIndex= */ 0);
float[] transformationMatrix = float[] transformationMatrix =
overlayMatrixProvider.getTransformationMatrix( overlayMatrixProvider.getTransformationMatrix(
@ -537,18 +540,15 @@ public final class DefaultVideoCompositor implements VideoCompositor {
/** Holds information on a frame and how to release it. */ /** Holds information on a frame and how to release it. */
private static final class InputFrameInfo { private static final class InputFrameInfo {
public final GlTextureProducer textureProducer; public final GlTextureProducer textureProducer;
public final GlTextureInfo texture; public final TimedGlTextureInfo timedGlTextureInfo;
public final long presentationTimeUs;
public final OverlaySettings overlaySettings; public final OverlaySettings overlaySettings;
public InputFrameInfo( public InputFrameInfo(
GlTextureProducer textureProducer, GlTextureProducer textureProducer,
GlTextureInfo texture, TimedGlTextureInfo timedGlTextureInfo,
long presentationTimeUs,
OverlaySettings overlaySettings) { OverlaySettings overlaySettings) {
this.textureProducer = textureProducer; this.textureProducer = textureProducer;
this.texture = texture; this.timedGlTextureInfo = timedGlTextureInfo;
this.presentationTimeUs = presentationTimeUs;
this.overlaySettings = overlaySettings; this.overlaySettings = overlaySettings;
} }
} }

View File

@ -28,7 +28,6 @@ import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLExt; import android.opengl.EGLExt;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import android.util.Pair;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.C; import androidx.media3.common.C;
@ -86,7 +85,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
private final Executor videoFrameProcessorListenerExecutor; private final Executor videoFrameProcessorListenerExecutor;
private final VideoFrameProcessor.Listener videoFrameProcessorListener; private final VideoFrameProcessor.Listener videoFrameProcessorListener;
private final Queue<Pair<GlTextureInfo, Long>> availableFrames; private final Queue<TimedGlTextureInfo> availableFrames;
private final TexturePool outputTexturePool; private final TexturePool outputTexturePool;
private final LongArrayQueue outputTextureTimestamps; // Synchronized with outputTexturePool. private final LongArrayQueue outputTextureTimestamps; // Synchronized with outputTexturePool.
private final LongArrayQueue syncObjects; private final LongArrayQueue syncObjects;
@ -221,7 +220,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
presentationTimeUs, presentationTimeUs,
/* renderTimeNs= */ presentationTimeUs * 1000); /* renderTimeNs= */ presentationTimeUs * 1000);
} else { } else {
availableFrames.add(Pair.create(inputTexture, presentationTimeUs)); availableFrames.add(new TimedGlTextureInfo(inputTexture, presentationTimeUs));
} }
inputListener.onReadyToAcceptInputFrame(); inputListener.onReadyToAcceptInputFrame();
} else { } else {
@ -307,11 +306,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return; return;
} }
checkState(!renderFramesAutomatically); checkState(!renderFramesAutomatically);
Pair<GlTextureInfo, Long> oldestAvailableFrame = availableFrames.remove(); TimedGlTextureInfo oldestAvailableFrame = availableFrames.remove();
renderFrame( renderFrame(
glObjectsProvider, glObjectsProvider,
/* inputTexture= */ oldestAvailableFrame.first, oldestAvailableFrame.glTextureInfo,
/* presentationTimeUs= */ oldestAvailableFrame.second, oldestAvailableFrame.presentationTimeUs,
renderTimeNs); renderTimeNs);
if (availableFrames.isEmpty() && isInputStreamEndedWithPendingAvailableFrames) { if (availableFrames.isEmpty() && isInputStreamEndedWithPendingAvailableFrames) {
checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed(); checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();

View File

@ -15,7 +15,6 @@
*/ */
package androidx.media3.effect; package androidx.media3.effect;
import android.util.Pair;
import androidx.annotation.GuardedBy; import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.C; import androidx.media3.common.C;
@ -39,7 +38,7 @@ import java.util.Queue;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
@GuardedBy("this") @GuardedBy("this")
private final Queue<Pair<GlTextureInfo, Long>> availableFrames; private final Queue<TimedGlTextureInfo> availableFrames;
@GuardedBy("this") @GuardedBy("this")
private int consumingGlShaderProgramInputCapacity; private int consumingGlShaderProgramInputCapacity;
@ -63,7 +62,7 @@ import java.util.Queue;
@Override @Override
public synchronized void onReadyToAcceptInputFrame() { public synchronized void onReadyToAcceptInputFrame() {
@Nullable Pair<GlTextureInfo, Long> pendingFrame = availableFrames.poll(); @Nullable TimedGlTextureInfo pendingFrame = availableFrames.poll();
if (pendingFrame == null) { if (pendingFrame == null) {
consumingGlShaderProgramInputCapacity++; consumingGlShaderProgramInputCapacity++;
return; return;
@ -72,11 +71,9 @@ import java.util.Queue;
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> () ->
consumingGlShaderProgram.queueInputFrame( consumingGlShaderProgram.queueInputFrame(
glObjectsProvider, glObjectsProvider, pendingFrame.glTextureInfo, pendingFrame.presentationTimeUs));
/* inputTexture= */ pendingFrame.first, @Nullable TimedGlTextureInfo nextPendingFrame = availableFrames.peek();
/* presentationTimeUs= */ pendingFrame.second)); if (nextPendingFrame != null && nextPendingFrame.presentationTimeUs == C.TIME_END_OF_SOURCE) {
@Nullable Pair<GlTextureInfo, Long> nextPendingFrame = availableFrames.peek();
if (nextPendingFrame != null && nextPendingFrame.second == C.TIME_END_OF_SOURCE) {
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream); consumingGlShaderProgram::signalEndOfCurrentInputStream);
availableFrames.remove(); availableFrames.remove();
@ -97,7 +94,7 @@ import java.util.Queue;
glObjectsProvider, inputTexture, presentationTimeUs)); glObjectsProvider, inputTexture, presentationTimeUs));
consumingGlShaderProgramInputCapacity--; consumingGlShaderProgramInputCapacity--;
} else { } else {
availableFrames.add(Pair.create(inputTexture, presentationTimeUs)); availableFrames.add(new TimedGlTextureInfo(inputTexture, presentationTimeUs));
} }
} }
@ -107,7 +104,7 @@ import java.util.Queue;
*/ */
public synchronized void signalEndOfCurrentStream() { public synchronized void signalEndOfCurrentStream() {
if (!availableFrames.isEmpty()) { if (!availableFrames.isEmpty()) {
availableFrames.add(Pair.create(GlTextureInfo.UNSET, C.TIME_END_OF_SOURCE)); availableFrames.add(new TimedGlTextureInfo(GlTextureInfo.UNSET, C.TIME_END_OF_SOURCE));
} else { } else {
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream); consumingGlShaderProgram::signalEndOfCurrentInputStream);

View File

@ -87,7 +87,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
private final ExecutorService sharedExecutorService; private final ExecutorService sharedExecutorService;
private final DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory; private final DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory;
private final Queue<CompositorOutputTextureInfo> compositorOutputTextures; private final Queue<TimedGlTextureInfo> compositorOutputTextures;
private final SparseArray<CompositorOutputTextureRelease> compositorOutputTextureReleases; private final SparseArray<CompositorOutputTextureRelease> compositorOutputTextureReleases;
private final long initialTimestampOffsetUs; private final long initialTimestampOffsetUs;
@ -363,8 +363,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
DebugTraceUtil.logEvent( DebugTraceUtil.logEvent(
COMPONENT_COMPOSITOR, EVENT_OUTPUT_TEXTURE_RENDERED, presentationTimeUs); COMPONENT_COMPOSITOR, EVENT_OUTPUT_TEXTURE_RENDERED, presentationTimeUs);
compositorOutputTextures.add( compositorOutputTextures.add(new TimedGlTextureInfo(outputTexture, presentationTimeUs));
new CompositorOutputTextureInfo(outputTexture, presentationTimeUs));
compositorOutputTextureReleases.put( compositorOutputTextureReleases.put(
outputTexture.texId, outputTexture.texId,
new CompositorOutputTextureRelease(textureProducer, presentationTimeUs)); new CompositorOutputTextureRelease(textureProducer, presentationTimeUs));
@ -421,7 +420,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
return; return;
} }
@Nullable CompositorOutputTextureInfo outputTexture = compositorOutputTextures.peek(); @Nullable TimedGlTextureInfo outputTexture = compositorOutputTextures.peek();
if (outputTexture == null) { if (outputTexture == null) {
return; return;
} }
@ -446,16 +445,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
: VideoFrameProcessingException.from(e))); : VideoFrameProcessingException.from(e)));
} }
private static final class CompositorOutputTextureInfo {
public final GlTextureInfo glTextureInfo;
public final long presentationTimeUs;
private CompositorOutputTextureInfo(GlTextureInfo glTextureInfo, long presentationTimeUs) {
this.glTextureInfo = glTextureInfo;
this.presentationTimeUs = presentationTimeUs;
}
}
private static final class CompositorOutputTextureRelease { private static final class CompositorOutputTextureRelease {
private final GlTextureProducer textureProducer; private final GlTextureProducer textureProducer;
private final long presentationTimeUs; private final long presentationTimeUs;

View File

@ -142,7 +142,7 @@ import java.util.concurrent.TimeUnit;
private final ConcurrentEffect<T> concurrentEffect; private final ConcurrentEffect<T> concurrentEffect;
private final TexturePool outputTexturePool; private final TexturePool outputTexturePool;
private final Queue<TimedTextureInfo<T>> frameQueue; private final Queue<QueuedFrame<T>> frameQueue;
private InputListener inputListener; private InputListener inputListener;
private OutputListener outputListener; private OutputListener outputListener;
private ErrorListener errorListener; private ErrorListener errorListener;
@ -226,7 +226,8 @@ import java.util.concurrent.TimeUnit;
Future<T> task = Future<T> task =
concurrentEffect.queueInputFrame(glObjectsProvider, outputTexture, presentationTimeUs); concurrentEffect.queueInputFrame(glObjectsProvider, outputTexture, presentationTimeUs);
frameQueue.add(new TimedTextureInfo<T>(outputTexture, presentationTimeUs, task)); frameQueue.add(
new QueuedFrame<T>(new TimedGlTextureInfo(outputTexture, presentationTimeUs), task));
inputListener.onInputFrameProcessed(inputTexture); inputListener.onInputFrameProcessed(inputTexture);
@ -297,25 +298,28 @@ import java.util.concurrent.TimeUnit;
* <p>Returns {@code false} if no more frames are available for output. * <p>Returns {@code false} if no more frames are available for output.
*/ */
private boolean outputOneFrame() { private boolean outputOneFrame() {
TimedTextureInfo<T> timedTextureInfo = frameQueue.poll(); QueuedFrame<T> queuedFrame = frameQueue.poll();
if (timedTextureInfo == null) { if (queuedFrame == null) {
return false; return false;
} }
try { try {
T result = T result =
Futures.getChecked( Futures.getChecked(
timedTextureInfo.task, queuedFrame.task,
VideoFrameProcessingException.class, VideoFrameProcessingException.class,
PROCESSING_TIMEOUT_MS, PROCESSING_TIMEOUT_MS,
TimeUnit.MILLISECONDS); TimeUnit.MILLISECONDS);
GlUtil.focusFramebufferUsingCurrentContext( GlUtil.focusFramebufferUsingCurrentContext(
timedTextureInfo.textureInfo.fboId, queuedFrame.timedGlTextureInfo.glTextureInfo.fboId,
timedTextureInfo.textureInfo.width, queuedFrame.timedGlTextureInfo.glTextureInfo.width,
timedTextureInfo.textureInfo.height); queuedFrame.timedGlTextureInfo.glTextureInfo.height);
concurrentEffect.finishProcessingAndBlend( concurrentEffect.finishProcessingAndBlend(
timedTextureInfo.textureInfo, timedTextureInfo.presentationTimeUs, result); queuedFrame.timedGlTextureInfo.glTextureInfo,
queuedFrame.timedGlTextureInfo.presentationTimeUs,
result);
outputListener.onOutputFrameAvailable( outputListener.onOutputFrameAvailable(
timedTextureInfo.textureInfo, timedTextureInfo.presentationTimeUs); queuedFrame.timedGlTextureInfo.glTextureInfo,
queuedFrame.timedGlTextureInfo.presentationTimeUs);
return true; return true;
} catch (GlUtil.GlException | VideoFrameProcessingException e) { } catch (GlUtil.GlException | VideoFrameProcessingException e) {
onError(e); onError(e);
@ -324,9 +328,9 @@ import java.util.concurrent.TimeUnit;
} }
private void cancelProcessingOfPendingFrames() { private void cancelProcessingOfPendingFrames() {
TimedTextureInfo<T> timedTextureInfo; QueuedFrame<T> queuedFrame;
while ((timedTextureInfo = frameQueue.poll()) != null) { while ((queuedFrame = frameQueue.poll()) != null) {
timedTextureInfo.task.cancel(/* mayInterruptIfRunning= */ false); queuedFrame.task.cancel(/* mayInterruptIfRunning= */ false);
} }
} }
@ -335,14 +339,12 @@ import java.util.concurrent.TimeUnit;
() -> errorListener.onError(VideoFrameProcessingException.from(e))); () -> errorListener.onError(VideoFrameProcessingException.from(e)));
} }
private static class TimedTextureInfo<T> { private static final class QueuedFrame<T> {
final GlTextureInfo textureInfo; public final TimedGlTextureInfo timedGlTextureInfo;
final long presentationTimeUs; public final Future<T> task;
final Future<T> task;
TimedTextureInfo(GlTextureInfo textureInfo, long presentationTimeUs, Future<T> task) { public QueuedFrame(TimedGlTextureInfo timedGlTextureInfo, Future<T> task) {
this.textureInfo = textureInfo; this.timedGlTextureInfo = timedGlTextureInfo;
this.presentationTimeUs = presentationTimeUs;
this.task = task; this.task = task;
} }
} }

View File

@ -0,0 +1,33 @@
/*
* Copyright 2025 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.effect;
import androidx.media3.common.GlTextureInfo;
/* package */ final class TimedGlTextureInfo {

  /** The {@link GlTextureInfo}. */
  public final GlTextureInfo glTextureInfo;

  /** The presentation time associated with the texture, in microseconds. */
  public final long presentationTimeUs;

  /**
   * Creates a new instance.
   *
   * @param textureInfo The {@link GlTextureInfo}.
   * @param timeUs The presentation time associated with the texture, in microseconds.
   */
  public TimedGlTextureInfo(GlTextureInfo textureInfo, long timeUs) {
    glTextureInfo = textureInfo;
    presentationTimeUs = timeUs;
  }
}