Add a class wrapping GlTextureInfo and presentation time

The grouping is already used in some places and will be used in dynamic effect updates.

PiperOrigin-RevId: 729569353
parent 7c2e8c1c4b
commit 4c3ac81873
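
At each call site the change is mechanical: an untyped Pair<GlTextureInfo, Long>, or a one-off holder class, becomes the shared wrapper. Below is a minimal sketch of that before/after pattern, assuming a hypothetical helper class placed in androidx.media3.effect (the wrapper is package-private there); the class and method names are illustrative and not part of the commit:

package androidx.media3.effect;

import android.util.Pair;
import androidx.media3.common.GlTextureInfo;
import java.util.Queue;

// Illustrative only: shows the call-site migration this commit performs.
final class PairVsWrapperExample {

  // Before: texture and timestamp travel as an untyped Pair; readers see
  // the opaque fields pair.first and pair.second.
  static void enqueueBefore(
      Queue<Pair<GlTextureInfo, Long>> queue, GlTextureInfo texture, long presentationTimeUs) {
    queue.add(Pair.create(texture, presentationTimeUs));
  }

  // After: the same data as a named value type; readers see
  // frame.glTextureInfo and frame.presentationTimeUs.
  static void enqueueAfter(
      Queue<TimedGlTextureInfo> queue, GlTextureInfo texture, long presentationTimeUs) {
    queue.add(new TimedGlTextureInfo(texture, presentationTimeUs));
  }
}

Grouping the two values means a queue poll returns one object with named fields, which is exactly what the hunks below exploit.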
@@ -201,8 +201,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
     InputFrameInfo inputFrameInfo =
         new InputFrameInfo(
             textureProducer,
-            inputTexture,
-            presentationTimeUs,
+            new TimedGlTextureInfo(inputTexture, presentationTimeUs),
             settings.getOverlaySettings(inputIndex, presentationTimeUs));
     inputSource.frameInfos.add(inputFrameInfo);

@@ -260,13 +259,15 @@ public final class DefaultVideoCompositor implements VideoCompositor {
     // nextTimestampToComposite.
     @Nullable InputFrameInfo nextPrimaryFrame = primaryInputSource.frameInfos.peek();
     long nextTimestampToComposite =
-        nextPrimaryFrame != null ? nextPrimaryFrame.presentationTimeUs : C.TIME_UNSET;
+        nextPrimaryFrame != null
+            ? nextPrimaryFrame.timedGlTextureInfo.presentationTimeUs
+            : C.TIME_UNSET;

     int numberOfSecondaryFramesBeforeOrAtNextTargetTimestamp =
         Iterables.size(
             Iterables.filter(
                 secondaryInputSource.frameInfos,
-                frame -> frame.presentationTimeUs <= nextTimestampToComposite));
+                frame -> frame.timedGlTextureInfo.presentationTimeUs <= nextTimestampToComposite));
     releaseFrames(
         secondaryInputSource,
         /* numberOfFramesToRelease= */ max(
@@ -277,7 +278,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
     for (int i = 0; i < numberOfFramesToRelease; i++) {
       InputFrameInfo frameInfoToRelease = inputSource.frameInfos.remove();
       frameInfoToRelease.textureProducer.releaseOutputTexture(
-          frameInfoToRelease.presentationTimeUs);
+          frameInfoToRelease.timedGlTextureInfo.presentationTimeUs);
     }
   }

@@ -302,7 +303,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {

     ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>();
     for (int i = 0; i < framesToComposite.size(); i++) {
-      GlTextureInfo texture = framesToComposite.get(i).texture;
+      GlTextureInfo texture = framesToComposite.get(i).timedGlTextureInfo.glTextureInfo;
       inputSizes.add(new Size(texture.width, texture.height));
     }
     Size outputSize = settings.getOutputSize(inputSizes.build());
@@ -310,7 +311,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
         glObjectsProvider, outputSize.getWidth(), outputSize.getHeight());

     GlTextureInfo outputTexture = outputTexturePool.useTexture();
-    long outputPresentationTimestampUs = primaryInputFrame.presentationTimeUs;
+    long outputPresentationTimestampUs = primaryInputFrame.timedGlTextureInfo.presentationTimeUs;
     outputTextureTimestamps.add(outputPresentationTimestampUs);

     compositorGlProgram.drawFrame(framesToComposite, outputTexture);
@@ -369,16 +370,18 @@ public final class DefaultVideoCompositor implements VideoCompositor {
     Iterator<InputFrameInfo> frameInfosIterator = secondaryInputSource.frameInfos.iterator();
     while (frameInfosIterator.hasNext()) {
       InputFrameInfo candidateFrame = frameInfosIterator.next();
-      long candidateTimestampUs = candidateFrame.presentationTimeUs;
+      long candidateTimestampUs = candidateFrame.timedGlTextureInfo.presentationTimeUs;
       long candidateAbsDistance =
-          abs(candidateTimestampUs - primaryFrameToComposite.presentationTimeUs);
+          abs(
+              candidateTimestampUs
+                  - primaryFrameToComposite.timedGlTextureInfo.presentationTimeUs);

       if (candidateAbsDistance < minTimeDiffFromPrimaryUs) {
         minTimeDiffFromPrimaryUs = candidateAbsDistance;
         secondaryFrameToComposite = candidateFrame;
       }

-      if (candidateTimestampUs > primaryFrameToComposite.presentationTimeUs
+      if (candidateTimestampUs > primaryFrameToComposite.timedGlTextureInfo.presentationTimeUs
           || (!frameInfosIterator.hasNext() && secondaryInputSource.isInputEnded)) {
         framesToComposite.add(checkNotNull(secondaryFrameToComposite));
         break;
@@ -503,7 +506,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {

   private void blendOntoFocusedTexture(InputFrameInfo inputFrameInfo) throws GlUtil.GlException {
     GlProgram glProgram = checkNotNull(this.glProgram);
-    GlTextureInfo inputTexture = inputFrameInfo.texture;
+    GlTextureInfo inputTexture = inputFrameInfo.timedGlTextureInfo.glTextureInfo;
     glProgram.setSamplerTexIdUniform("uTexSampler", inputTexture.texId, /* texUnitIndex= */ 0);
     float[] transformationMatrix =
         overlayMatrixProvider.getTransformationMatrix(
@@ -537,18 +540,15 @@ public final class DefaultVideoCompositor implements VideoCompositor {
   /** Holds information on a frame and how to release it. */
   private static final class InputFrameInfo {
     public final GlTextureProducer textureProducer;
-    public final GlTextureInfo texture;
-    public final long presentationTimeUs;
+    public final TimedGlTextureInfo timedGlTextureInfo;
     public final OverlaySettings overlaySettings;

     public InputFrameInfo(
         GlTextureProducer textureProducer,
-        GlTextureInfo texture,
-        long presentationTimeUs,
+        TimedGlTextureInfo timedGlTextureInfo,
         OverlaySettings overlaySettings) {
       this.textureProducer = textureProducer;
-      this.texture = texture;
-      this.presentationTimeUs = presentationTimeUs;
+      this.timedGlTextureInfo = timedGlTextureInfo;
       this.overlaySettings = overlaySettings;
     }
   }
@@ -28,7 +28,6 @@ import android.opengl.EGLContext;
 import android.opengl.EGLDisplay;
 import android.opengl.EGLExt;
 import android.opengl.EGLSurface;
-import android.util.Pair;
 import android.view.Surface;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
@@ -86,7 +85,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
   private final Executor videoFrameProcessorListenerExecutor;
   private final VideoFrameProcessor.Listener videoFrameProcessorListener;
-  private final Queue<Pair<GlTextureInfo, Long>> availableFrames;
+  private final Queue<TimedGlTextureInfo> availableFrames;
   private final TexturePool outputTexturePool;
   private final LongArrayQueue outputTextureTimestamps; // Synchronized with outputTexturePool.
   private final LongArrayQueue syncObjects;
@@ -221,7 +220,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
             presentationTimeUs,
             /* renderTimeNs= */ presentationTimeUs * 1000);
       } else {
-        availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
+        availableFrames.add(new TimedGlTextureInfo(inputTexture, presentationTimeUs));
       }
       inputListener.onReadyToAcceptInputFrame();
     } else {
@@ -307,11 +306,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       return;
     }
     checkState(!renderFramesAutomatically);
-    Pair<GlTextureInfo, Long> oldestAvailableFrame = availableFrames.remove();
+    TimedGlTextureInfo oldestAvailableFrame = availableFrames.remove();
     renderFrame(
         glObjectsProvider,
-        /* inputTexture= */ oldestAvailableFrame.first,
-        /* presentationTimeUs= */ oldestAvailableFrame.second,
+        oldestAvailableFrame.glTextureInfo,
+        oldestAvailableFrame.presentationTimeUs,
         renderTimeNs);
     if (availableFrames.isEmpty() && isInputStreamEndedWithPendingAvailableFrames) {
       checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();
@@ -15,7 +15,6 @@
  */
 package androidx.media3.effect;

-import android.util.Pair;
 import androidx.annotation.GuardedBy;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
@@ -39,7 +38,7 @@ import java.util.Queue;
   private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;

   @GuardedBy("this")
-  private final Queue<Pair<GlTextureInfo, Long>> availableFrames;
+  private final Queue<TimedGlTextureInfo> availableFrames;

   @GuardedBy("this")
   private int consumingGlShaderProgramInputCapacity;
@@ -63,7 +62,7 @@ import java.util.Queue;

   @Override
   public synchronized void onReadyToAcceptInputFrame() {
-    @Nullable Pair<GlTextureInfo, Long> pendingFrame = availableFrames.poll();
+    @Nullable TimedGlTextureInfo pendingFrame = availableFrames.poll();
     if (pendingFrame == null) {
       consumingGlShaderProgramInputCapacity++;
       return;
@@ -72,11 +71,9 @@ import java.util.Queue;
     videoFrameProcessingTaskExecutor.submit(
         () ->
             consumingGlShaderProgram.queueInputFrame(
-                glObjectsProvider,
-                /* inputTexture= */ pendingFrame.first,
-                /* presentationTimeUs= */ pendingFrame.second));
-    @Nullable Pair<GlTextureInfo, Long> nextPendingFrame = availableFrames.peek();
-    if (nextPendingFrame != null && nextPendingFrame.second == C.TIME_END_OF_SOURCE) {
+                glObjectsProvider, pendingFrame.glTextureInfo, pendingFrame.presentationTimeUs));
+    @Nullable TimedGlTextureInfo nextPendingFrame = availableFrames.peek();
+    if (nextPendingFrame != null && nextPendingFrame.presentationTimeUs == C.TIME_END_OF_SOURCE) {
       videoFrameProcessingTaskExecutor.submit(
           consumingGlShaderProgram::signalEndOfCurrentInputStream);
       availableFrames.remove();
@@ -97,7 +94,7 @@ import java.util.Queue;
               glObjectsProvider, inputTexture, presentationTimeUs));
       consumingGlShaderProgramInputCapacity--;
     } else {
-      availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
+      availableFrames.add(new TimedGlTextureInfo(inputTexture, presentationTimeUs));
     }
   }

@@ -107,7 +104,7 @@ import java.util.Queue;
    */
   public synchronized void signalEndOfCurrentStream() {
     if (!availableFrames.isEmpty()) {
-      availableFrames.add(Pair.create(GlTextureInfo.UNSET, C.TIME_END_OF_SOURCE));
+      availableFrames.add(new TimedGlTextureInfo(GlTextureInfo.UNSET, C.TIME_END_OF_SOURCE));
     } else {
       videoFrameProcessingTaskExecutor.submit(
           consumingGlShaderProgram::signalEndOfCurrentInputStream);
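
The last hunk above keeps the existing end-of-stream convention, now spelled with the wrapper: a queued TimedGlTextureInfo whose presentationTimeUs is C.TIME_END_OF_SOURCE acts as a sentinel behind any real frames. A condensed, self-contained sketch of that convention follows; the class and method names are hypothetical, and only the sentinel value itself comes from the diff:

package androidx.media3.effect;

import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.GlTextureInfo;
import java.util.ArrayDeque;
import java.util.Queue;

// Illustrative only: condensed form of the end-of-stream handling above.
final class EndOfStreamSentinelExample {

  private final Queue<TimedGlTextureInfo> availableFrames = new ArrayDeque<>();

  // Queue the sentinel behind any real frames, so it is reached only after
  // every pending frame has been drained.
  void signalEndOfCurrentStream() {
    availableFrames.add(new TimedGlTextureInfo(GlTextureInfo.UNSET, C.TIME_END_OF_SOURCE));
  }

  // Returns the next real frame, or null when the queue is empty or the
  // sentinel marks the end of the stream.
  @Nullable
  TimedGlTextureInfo pollFrame() {
    @Nullable TimedGlTextureInfo frame = availableFrames.poll();
    if (frame != null && frame.presentationTimeUs == C.TIME_END_OF_SOURCE) {
      onStreamEnded(); // Sentinel reached: no texture to process.
      return null;
    }
    return frame;
  }

  private void onStreamEnded() {} // Stand-in for signalEndOfCurrentInputStream.
}

Because end-of-stream rides in the same queue as the frames, no separate "stream ended" flag has to be kept in sync with the pending-frame count.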
@@ -87,7 +87,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
   private final ExecutorService sharedExecutorService;

   private final DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory;
-  private final Queue<CompositorOutputTextureInfo> compositorOutputTextures;
+  private final Queue<TimedGlTextureInfo> compositorOutputTextures;
   private final SparseArray<CompositorOutputTextureRelease> compositorOutputTextureReleases;

   private final long initialTimestampOffsetUs;
@@ -363,8 +363,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
     DebugTraceUtil.logEvent(
         COMPONENT_COMPOSITOR, EVENT_OUTPUT_TEXTURE_RENDERED, presentationTimeUs);

-    compositorOutputTextures.add(
-        new CompositorOutputTextureInfo(outputTexture, presentationTimeUs));
+    compositorOutputTextures.add(new TimedGlTextureInfo(outputTexture, presentationTimeUs));
     compositorOutputTextureReleases.put(
         outputTexture.texId,
         new CompositorOutputTextureRelease(textureProducer, presentationTimeUs));
@@ -421,7 +420,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
       return;
     }

-    @Nullable CompositorOutputTextureInfo outputTexture = compositorOutputTextures.peek();
+    @Nullable TimedGlTextureInfo outputTexture = compositorOutputTextures.peek();
     if (outputTexture == null) {
       return;
     }
@@ -446,16 +445,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
                 : VideoFrameProcessingException.from(e)));
   }

-  private static final class CompositorOutputTextureInfo {
-    public final GlTextureInfo glTextureInfo;
-    public final long presentationTimeUs;
-
-    private CompositorOutputTextureInfo(GlTextureInfo glTextureInfo, long presentationTimeUs) {
-      this.glTextureInfo = glTextureInfo;
-      this.presentationTimeUs = presentationTimeUs;
-    }
-  }
-
   private static final class CompositorOutputTextureRelease {
     private final GlTextureProducer textureProducer;
     private final long presentationTimeUs;
@@ -142,7 +142,7 @@ import java.util.concurrent.TimeUnit;

   private final ConcurrentEffect<T> concurrentEffect;
   private final TexturePool outputTexturePool;
-  private final Queue<TimedTextureInfo<T>> frameQueue;
+  private final Queue<QueuedFrame<T>> frameQueue;
   private InputListener inputListener;
   private OutputListener outputListener;
   private ErrorListener errorListener;
@@ -226,7 +226,8 @@ import java.util.concurrent.TimeUnit;

     Future<T> task =
         concurrentEffect.queueInputFrame(glObjectsProvider, outputTexture, presentationTimeUs);
-    frameQueue.add(new TimedTextureInfo<T>(outputTexture, presentationTimeUs, task));
+    frameQueue.add(
+        new QueuedFrame<T>(new TimedGlTextureInfo(outputTexture, presentationTimeUs), task));

     inputListener.onInputFrameProcessed(inputTexture);

@@ -297,25 +298,28 @@ import java.util.concurrent.TimeUnit;
    * <p>Returns {@code false} if no more frames are available for output.
    */
   private boolean outputOneFrame() {
-    TimedTextureInfo<T> timedTextureInfo = frameQueue.poll();
-    if (timedTextureInfo == null) {
+    QueuedFrame<T> queuedFrame = frameQueue.poll();
+    if (queuedFrame == null) {
       return false;
     }
     try {
       T result =
           Futures.getChecked(
-              timedTextureInfo.task,
+              queuedFrame.task,
              VideoFrameProcessingException.class,
              PROCESSING_TIMEOUT_MS,
              TimeUnit.MILLISECONDS);
       GlUtil.focusFramebufferUsingCurrentContext(
-          timedTextureInfo.textureInfo.fboId,
-          timedTextureInfo.textureInfo.width,
-          timedTextureInfo.textureInfo.height);
+          queuedFrame.timedGlTextureInfo.glTextureInfo.fboId,
+          queuedFrame.timedGlTextureInfo.glTextureInfo.width,
+          queuedFrame.timedGlTextureInfo.glTextureInfo.height);
       concurrentEffect.finishProcessingAndBlend(
-          timedTextureInfo.textureInfo, timedTextureInfo.presentationTimeUs, result);
+          queuedFrame.timedGlTextureInfo.glTextureInfo,
+          queuedFrame.timedGlTextureInfo.presentationTimeUs,
+          result);
       outputListener.onOutputFrameAvailable(
-          timedTextureInfo.textureInfo, timedTextureInfo.presentationTimeUs);
+          queuedFrame.timedGlTextureInfo.glTextureInfo,
+          queuedFrame.timedGlTextureInfo.presentationTimeUs);
       return true;
     } catch (GlUtil.GlException | VideoFrameProcessingException e) {
       onError(e);
@@ -324,9 +328,9 @@ import java.util.concurrent.TimeUnit;
   }

   private void cancelProcessingOfPendingFrames() {
-    TimedTextureInfo<T> timedTextureInfo;
-    while ((timedTextureInfo = frameQueue.poll()) != null) {
-      timedTextureInfo.task.cancel(/* mayInterruptIfRunning= */ false);
+    QueuedFrame<T> queuedFrame;
+    while ((queuedFrame = frameQueue.poll()) != null) {
+      queuedFrame.task.cancel(/* mayInterruptIfRunning= */ false);
     }
   }

@@ -335,14 +339,12 @@ import java.util.concurrent.TimeUnit;
         () -> errorListener.onError(VideoFrameProcessingException.from(e)));
   }

-  private static class TimedTextureInfo<T> {
-    final GlTextureInfo textureInfo;
-    final long presentationTimeUs;
-    final Future<T> task;
+  private static final class QueuedFrame<T> {
+    public final TimedGlTextureInfo timedGlTextureInfo;
+    public final Future<T> task;

-    TimedTextureInfo(GlTextureInfo textureInfo, long presentationTimeUs, Future<T> task) {
-      this.textureInfo = textureInfo;
-      this.presentationTimeUs = presentationTimeUs;
+    public QueuedFrame(TimedGlTextureInfo timedGlTextureInfo, Future<T> task) {
+      this.timedGlTextureInfo = timedGlTextureInfo;
       this.task = task;
     }
   }
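
The renamed QueuedFrame holder pairs the grouped texture-and-timestamp with the frame's in-flight Future, so per-frame state shrinks from three fields to two and flushing only has to cancel tasks. A stripped-down sketch of that bookkeeping, with hypothetical class and method names around the QueuedFrame shape taken from the diff:

package androidx.media3.effect;

import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.Future;

// Illustrative only: condensed form of the QueuedFrame bookkeeping above.
final class QueuedFramePatternExample {

  // Pairs the grouped texture+timestamp with the frame's in-flight task.
  private static final class QueuedFrame<T> {
    final TimedGlTextureInfo timedGlTextureInfo;
    final Future<T> task;

    QueuedFrame(TimedGlTextureInfo timedGlTextureInfo, Future<T> task) {
      this.timedGlTextureInfo = timedGlTextureInfo;
      this.task = task;
    }
  }

  private final Queue<QueuedFrame<Void>> frameQueue = new ArrayDeque<>();

  void enqueue(TimedGlTextureInfo frame, Future<Void> task) {
    frameQueue.add(new QueuedFrame<>(frame, task));
  }

  // On flush, only the in-flight tasks are cancelled; passing
  // mayInterruptIfRunning=false lets an already-running task finish.
  void cancelProcessingOfPendingFrames() {
    QueuedFrame<Void> queuedFrame;
    while ((queuedFrame = frameQueue.poll()) != null) {
      queuedFrame.task.cancel(/* mayInterruptIfRunning= */ false);
    }
  }
}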
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.effect;
+
+import androidx.media3.common.GlTextureInfo;
+
+/* package */ final class TimedGlTextureInfo {
+
+  /** The {@link GlTextureInfo}. */
+  public final GlTextureInfo glTextureInfo;
+
+  /** The presentation time associated with the texture, in microseconds. */
+  public final long presentationTimeUs;
+
+  /** Creates a new instance. */
+  public TimedGlTextureInfo(GlTextureInfo glTextureInfo, long presentationTimeUs) {
+    this.glTextureInfo = glTextureInfo;
+    this.presentationTimeUs = presentationTimeUs;
+  }
+}