PiperOrigin-RevId: 649003095
andrewlewis 2024-07-03 02:49:04 -07:00 committed by Copybara-Service
parent b531d93b90
commit 0b96f4372f
8 changed files with 167 additions and 36 deletions

View File

@@ -18,6 +18,10 @@
* Image:
* DRM:
* Effect:
* Deprecate `DefaultVideoFrameProcessor#setInputDefaultBufferSize` and
`DefaultVideoFrameProcessor.Builder#setRequireRegisteringAllInputFrames`.
Use the new frame processor input type
`INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION` instead.
* Muxers:
* IMA extension:
* Session:

View File

@@ -48,12 +48,18 @@ import java.util.concurrent.Executor;
public interface VideoFrameProcessor {
/**
* Specifies how the input frames are made available to the {@link VideoFrameProcessor}. One of
* {@link #INPUT_TYPE_SURFACE}, {@link #INPUT_TYPE_BITMAP} or {@link #INPUT_TYPE_TEXTURE_ID}.
* {@link #INPUT_TYPE_SURFACE}, {@link #INPUT_TYPE_BITMAP}, {@link #INPUT_TYPE_TEXTURE_ID} or
* {@link #INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target(TYPE_USE)
@IntDef({INPUT_TYPE_SURFACE, INPUT_TYPE_BITMAP, INPUT_TYPE_TEXTURE_ID})
@IntDef({
INPUT_TYPE_SURFACE,
INPUT_TYPE_BITMAP,
INPUT_TYPE_TEXTURE_ID,
INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION,
})
@interface InputType {}
/**
@@ -73,6 +79,16 @@ public interface VideoFrameProcessor {
*/
int INPUT_TYPE_TEXTURE_ID = 3;
/**
* Input frames come from the {@linkplain #getInputSurface input surface} and don't need to be
* {@linkplain #registerInputFrame registered} (unlike with {@link #INPUT_TYPE_SURFACE}).
*
* <p>Every frame must use the {@linkplain #registerInputStream(int, List, FrameInfo) input
* stream's registered} frame info. Registering the input stream also sets the surface's
* {@linkplain android.graphics.SurfaceTexture#setDefaultBufferSize(int, int) default buffer
* size}.
*/
int INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION = 4;
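A minimal usage sketch of the new input type, mirroring the test added later in this commit; the helper method name and the bitmap argument are illustrative placeholders, not part of the API:

// Sketch: with INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION, each unlockCanvasAndPost
// makes one frame available, and no per-frame registerInputFrame() call is needed.
private static void feedFramesAutomatically(
    VideoFrameProcessor videoFrameProcessor, Bitmap bitmap) {
  videoFrameProcessor.registerInputStream(
      VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION,
      /* effects= */ ImmutableList.of(),
      new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, bitmap.getWidth(), bitmap.getHeight())
          .build());
  // In practice, wait for Listener#onInputStreamRegistered before drawing, as the test does.
  Surface inputSurface = videoFrameProcessor.getInputSurface();
  Canvas canvas = inputSurface.lockCanvas(/* inOutDirty= */ null);
  canvas.drawBitmap(bitmap, /* left= */ 0f, /* top= */ 0f, /* paint= */ null);
  inputSurface.unlockCanvasAndPost(canvas);
  videoFrameProcessor.signalEndOfInput();
}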
/** A factory for {@link VideoFrameProcessor} instances. */
interface Factory {

View File

@@ -16,12 +16,15 @@
package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmapUnpremultipliedAlpha;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
@@ -32,8 +35,10 @@ import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.ConstantRateTimestampIterator;
import androidx.media3.common.util.NullableType;
import androidx.media3.common.util.SystemClock;
import androidx.media3.common.util.Util;
import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
@@ -41,6 +46,7 @@ import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@@ -288,6 +294,74 @@ public class DefaultVideoFrameProcessorTest {
.isAtLeast(firstStreamLastFrameAvailableTimeMs.get());
}
@Test
public void registerInputStreamWithAutomaticFrameRegistration_succeeds() throws Exception {
CountDownLatch inputStreamRegisteredCountDownLatch = new CountDownLatch(1);
AtomicInteger outputFrameCount = new AtomicInteger();
AtomicReference<@NullableType Exception> error = new AtomicReference<>();
CountDownLatch endedCountDownLatch = new CountDownLatch(1);
defaultVideoFrameProcessor =
createDefaultVideoFrameProcessor(
new VideoFrameProcessor.Listener() {
@Override
public void onInputStreamRegistered(
@VideoFrameProcessor.InputType int inputType,
List<Effect> effects,
FrameInfo frameInfo) {
inputStreamRegisteredCountDownLatch.countDown();
}
@Override
public void onOutputSizeChanged(int width, int height) {}
@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
outputFrameCount.incrementAndGet();
}
@Override
public void onError(VideoFrameProcessingException exception) {
error.set(exception);
}
@Override
public void onEnded() {
endedCountDownLatch.countDown();
}
});
Bitmap bitmap = BitmapPixelTestUtil.readBitmap(ORIGINAL_PNG_ASSET_PATH);
defaultVideoFrameProcessor.registerInputStream(
VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION,
/* effects= */ ImmutableList.of(),
new FrameInfo.Builder(ColorInfo.SRGB_BT709_FULL, bitmap.getWidth(), bitmap.getHeight())
.build());
inputStreamRegisteredCountDownLatch.await();
checkState(defaultVideoFrameProcessor.registerInputFrame());
int inputFrameCount = 2;
Surface surface = defaultVideoFrameProcessor.getInputSurface();
for (int i = 0; i < inputFrameCount; i++) {
Canvas canvas = surface.lockCanvas(/* inOutDirty= */ null);
// Load the bitmap each time, as it's recycled after each use.
canvas.drawBitmap(
BitmapPixelTestUtil.readBitmap(ORIGINAL_PNG_ASSET_PATH),
/* left= */ 0f,
/* top= */ 0f,
/* paint= */ null);
// This causes a frame to become available on the input surface, which is processed by the
// video frame processor.
surface.unlockCanvasAndPost(canvas);
}
defaultVideoFrameProcessor.signalEndOfInput();
if (!endedCountDownLatch.await(TEST_TIMEOUT_MS, MILLISECONDS)) {
throw new IllegalStateException("Test timeout", error.get());
}
assertThat(error.get()).isNull();
assertThat(outputFrameCount.get()).isEqualTo(inputFrameCount);
}
private DefaultVideoFrameProcessor createDefaultVideoFrameProcessor(
VideoFrameProcessor.Listener listener) throws Exception {
return checkNotNull(factory)

View File

@@ -199,7 +199,14 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
* <p>Regardless of the value set, {@link #registerInputStream(int, List, FrameInfo)} must be
* called for each input stream to specify the format for upcoming frames before calling
* {@link #registerInputFrame()}.
*
* @param requireRegisteringAllInputFrames Whether registering every input frame is required.
* @deprecated For automatic frame registration ({@code
* setRequireRegisteringAllInputFrames(false)}), use {@link
VideoFrameProcessor#INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION} instead. Otherwise,
this call can be removed.
*/
@Deprecated
@CanIgnoreReturnValue
public Builder setRequireRegisteringAllInputFrames(boolean requireRegisteringAllInputFrames) {
this.requireRegisteringAllInputFrames = requireRegisteringAllInputFrames;
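A hedged migration sketch for the deprecated builder flag; `builder`, `videoFrameProcessor`, `effects`, and `frameInfo` are illustrative placeholders:

// Before (deprecated): opt out of per-frame registration at build time.
builder.setRequireRegisteringAllInputFrames(false);

// After: keep the default builder and choose the input type per stream instead,
// which also removes the need for per-frame registerInputFrame() calls.
videoFrameProcessor.registerInputStream(
    VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION,
    /* effects= */ effects,
    /* frameInfo= */ frameInfo);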
@@ -514,7 +521,11 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
*
* @param width The default width for input buffers, in pixels.
* @param height The default height for input buffers, in pixels.
* @deprecated Set the input type to {@link
* VideoFrameProcessor#INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION} instead, which sets
* the default buffer size automatically based on the registered frame info.
*/
@Deprecated
public void setInputDefaultBufferSize(int width, int height) {
inputSwitcher.setInputDefaultBufferSize(width, height);
}
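A hedged sketch of the corresponding migration for the default buffer size; `width`, `height`, and `colorInfo` are illustrative placeholders:

// Before (deprecated): set the input surface's default buffer size explicitly.
defaultVideoFrameProcessor.setInputDefaultBufferSize(width, height);

// After: the default buffer size is derived from the FrameInfo registered with the stream
// (see ExternalTextureManager#setInputFrameInfo in this commit).
defaultVideoFrameProcessor.registerInputStream(
    VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION,
    /* effects= */ ImmutableList.of(),
    new FrameInfo.Builder(colorInfo, width, height).build());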
@@ -933,6 +944,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
return "Bitmap";
case INPUT_TYPE_TEXTURE_ID:
return "Texture ID";
case INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION:
return "Surface with automatic frame registration";
default:
throw new IllegalArgumentException(String.valueOf(inputType));
}

View File

@@ -87,7 +87,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final float[] textureTransformMatrix;
private final Queue<FrameInfo> pendingFrames;
private final ScheduledExecutorService scheduledExecutorService;
private final boolean repeatLastRegisteredFrame;
private final boolean experimentalAdjustSurfaceTextureTransformationMatrix;
// Must be accessed on the GL thread.
@@ -98,6 +97,7 @@
// The frame that is sent downstream and is not done processing yet.
@Nullable private FrameInfo currentFrame;
@Nullable private FrameInfo lastRegisteredFrame;
private boolean repeatLastRegisteredFrame;
@Nullable private Future<?> forceSignalEndOfStreamFuture;
private boolean shouldRejectIncomingFrames;
@@ -234,6 +234,17 @@
});
}
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo, boolean automaticReregistration) {
// Ignore inputFrameInfo when not automatically re-registering frames because it's also passed
// to registerInputFrame.
repeatLastRegisteredFrame = automaticReregistration;
if (repeatLastRegisteredFrame) {
lastRegisteredFrame = inputFrameInfo;
surfaceTexture.setDefaultBufferSize(inputFrameInfo.width, inputFrameInfo.height);
}
}
/**
* Notifies the {@code ExternalTextureManager} that a frame with the given {@link FrameInfo} will
* become available via the {@link SurfaceTexture} eventually.

View File

@@ -19,6 +19,7 @@ package androidx.media3.effect;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
@@ -81,15 +82,16 @@ import org.checkerframework.checker.nullness.qual.Nullable;
this.experimentalAdjustSurfaceTextureTransformationMatrix =
experimentalAdjustSurfaceTextureTransformationMatrix;
// TODO(b/274109008): Investigate lazy instantiating the texture managers.
inputs.put(
INPUT_TYPE_SURFACE,
// TODO(b/274109008): Investigate lazily instantiating the texture managers.
Input surfaceInput =
new Input(
new ExternalTextureManager(
glObjectsProvider,
videoFrameProcessingTaskExecutor,
repeatLastRegisteredFrame,
experimentalAdjustSurfaceTextureTransformationMatrix)));
experimentalAdjustSurfaceTextureTransformationMatrix));
inputs.put(INPUT_TYPE_SURFACE, surfaceInput);
inputs.put(INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION, surfaceInput);
inputs.put(
INPUT_TYPE_BITMAP,
new Input(
@@ -109,6 +111,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
DefaultShaderProgram samplingShaderProgram;
switch (inputType) {
case INPUT_TYPE_SURFACE:
case INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION:
samplingShaderProgram =
DefaultShaderProgram.createWithExternalSampler(
context,
@@ -152,29 +155,28 @@ import org.checkerframework.checker.nullness.qual.Nullable;
checkState(contains(inputs, newInputType), "Input type not registered: " + newInputType);
for (int i = 0; i < inputs.size(); i++) {
@VideoFrameProcessor.InputType int inputType = inputs.keyAt(i);
Input input = inputs.get(inputType);
if (inputType == newInputType) {
if (input.getInputColorInfo() == null
|| !newInputFrameInfo.colorInfo.equals(input.getInputColorInfo())) {
input.setSamplingGlShaderProgram(
createSamplingShaderProgram(newInputFrameInfo.colorInfo, newInputType));
input.setInputColorInfo(newInputFrameInfo.colorInfo);
}
input.setChainingListener(
new GatedChainingListenerWrapper(
glObjectsProvider,
checkNotNull(input.getSamplingGlShaderProgram()),
this.downstreamShaderProgram,
videoFrameProcessingTaskExecutor));
input.setActive(true);
downstreamShaderProgram.setInputListener(checkNotNull(input.gatedChainingListenerWrapper));
activeTextureManager = input.textureManager;
} else {
input.setActive(false);
}
inputs.get(inputs.keyAt(i)).setActive(false);
}
checkNotNull(activeTextureManager).setInputFrameInfo(newInputFrameInfo);
// Activate the relevant input for the new input type.
Input input = inputs.get(newInputType);
if (input.getInputColorInfo() == null
|| !newInputFrameInfo.colorInfo.equals(input.getInputColorInfo())) {
input.setSamplingGlShaderProgram(
createSamplingShaderProgram(newInputFrameInfo.colorInfo, newInputType));
input.setInputColorInfo(newInputFrameInfo.colorInfo);
}
input.setChainingListener(
new GatedChainingListenerWrapper(
glObjectsProvider,
checkNotNull(input.getSamplingGlShaderProgram()),
this.downstreamShaderProgram,
videoFrameProcessingTaskExecutor));
input.setActive(true);
downstreamShaderProgram.setInputListener(checkNotNull(input.gatedChainingListenerWrapper));
activeTextureManager = input.textureManager;
boolean automaticRegistration = newInputType == INPUT_TYPE_SURFACE_AUTOMATIC_FRAME_REGISTRATION;
checkNotNull(activeTextureManager).setInputFrameInfo(newInputFrameInfo, automaticRegistration);
}
/** Returns whether the {@code InputSwitcher} is connected to an active input. */
@@ -203,7 +205,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
/**
* Returns the input {@link Surface}.
*
* @return The input {@link Surface}, regardless if the current input is {@linkplain
* @return The input {@link Surface}, regardless of whether the current input is {@linkplain
* #switchToInput set} to {@link VideoFrameProcessor#INPUT_TYPE_SURFACE}.
*/
public Surface getInputSurface() {
@@ -242,6 +244,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
private @MonotonicNonNull ExternalShaderProgram samplingGlShaderProgram;
private @MonotonicNonNull ColorInfo inputColorInfo;
private @MonotonicNonNull GatedChainingListenerWrapper gatedChainingListenerWrapper;
private boolean released;
public Input(TextureManager textureManager) {
this.textureManager = textureManager;
@@ -282,9 +285,12 @@ import org.checkerframework.checker.nullness.qual.Nullable;
}
public void release() throws VideoFrameProcessingException {
textureManager.release();
if (samplingGlShaderProgram != null) {
samplingGlShaderProgram.release();
if (!released) {
released = true;
textureManager.release();
if (samplingGlShaderProgram != null) {
samplingGlShaderProgram.release();
}
}
}
}

View File

@@ -106,7 +106,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
@Override
public void setInputFrameInfo(FrameInfo inputFrameInfo) {
public void setInputFrameInfo(FrameInfo inputFrameInfo, boolean automaticReregistration) {
this.inputFrameInfo = inputFrameInfo;
}

View File

@@ -107,12 +107,19 @@ import androidx.media3.common.util.TimestampIterator;
* Sets information about the input frames.
*
* <p>The new input information is applied from the next frame {@linkplain #registerInputFrame
* registered} or {@linkplain #queueInputTexture queued} onwards.
* registered} or {@linkplain #queueInputTexture queued} onwards. If the implementation requires
* frames to be registered, it may use the {@link FrameInfo} passed to {@link
* #registerInputFrame(FrameInfo)} instead of the one passed here.
*
* <p>Pixels are expanded using the {@link FrameInfo#pixelWidthHeightRatio} so that the output
* frames' pixels have a ratio of 1.
*
* @param inputFrameInfo Information about the next input frame.
* @param automaticReregistration Whether the frames should be re-registered automatically, if
* using an input surface. Pass {@code false} if every frame will be registered before it is
* rendered to the surface.
*/
public void setInputFrameInfo(FrameInfo inputFrameInfo) {
public void setInputFrameInfo(FrameInfo inputFrameInfo, boolean automaticReregistration) {
// Do nothing.
}