Effect: Rename FrameProcessor

Rename FrameProcessor to VideoFrameProcessor, and GlEffectsFrameProcessor to
DefaultVideoFrameProcessor.

Most changes are semi-mechanical, semi-manual find-replace, preserving case:
* "FrameProc" -> "VideoFrameProc" (e.g. FrameProcessor -> VideoFrameProcessor, and
   FrameProcessingException -> VideoFrameProcessingException)
* "GlEffectsVideoFrameProc" -> "DefaultVideoFrameProc"

PiperOrigin-RevId: 509887384
This commit is contained in:
huangdarwin 2023-02-15 19:22:51 +00:00 committed by christosts
parent 41a03dd8a6
commit cf768329e6
52 changed files with 775 additions and 751 deletions

View File

@@ -19,7 +19,7 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram; import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@@ -59,7 +59,7 @@ import java.io.IOException;
* @param minInnerRadius The lower bound of the radius that is unaffected by the effect. * @param minInnerRadius The lower bound of the radius that is unaffected by the effect.
* @param maxInnerRadius The upper bound of the radius that is unaffected by the effect. * @param maxInnerRadius The upper bound of the radius that is unaffected by the effect.
* @param outerRadius The radius after which all pixels are black. * @param outerRadius The radius after which all pixels are black.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public PeriodicVignetteShaderProgram( public PeriodicVignetteShaderProgram(
Context context, Context context,
@@ -69,7 +69,7 @@ import java.io.IOException;
float minInnerRadius, float minInnerRadius,
float maxInnerRadius, float maxInnerRadius,
float outerRadius) float outerRadius)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
checkArgument(minInnerRadius <= maxInnerRadius); checkArgument(minInnerRadius <= maxInnerRadius);
checkArgument(maxInnerRadius <= outerRadius); checkArgument(maxInnerRadius <= outerRadius);
@@ -78,7 +78,7 @@ import java.io.IOException;
try { try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
glProgram.setFloatsUniform("uCenter", new float[] {centerX, centerY}); glProgram.setFloatsUniform("uCenter", new float[] {centerX, centerY});
glProgram.setFloatsUniform("uOuterRadius", new float[] {outerRadius}); glProgram.setFloatsUniform("uOuterRadius", new float[] {outerRadius});
@@ -95,7 +95,8 @@ import java.io.IOException;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
@@ -107,17 +108,17 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad. // The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
} }

View File

@@ -24,7 +24,7 @@ import android.content.Context;
import android.opengl.EGL14; import android.opengl.EGL14;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.LibraryLoader; import androidx.media3.common.util.LibraryLoader;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.effect.GlShaderProgram; import androidx.media3.effect.GlShaderProgram;
@@ -112,7 +112,7 @@ import java.util.concurrent.Future;
futures = new ArrayDeque<>(); futures = new ArrayDeque<>();
inputListener = new InputListener() {}; inputListener = new InputListener() {};
outputListener = new OutputListener() {}; outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {}; errorListener = (videoFrameProcessingException) -> {};
errorListenerExecutor = MoreExecutors.directExecutor(); errorListenerExecutor = MoreExecutors.directExecutor();
EglManager eglManager = new EglManager(EGL14.eglGetCurrentContext()); EglManager eglManager = new EglManager(EGL14.eglGetCurrentContext());
frameProcessor = frameProcessor =
@@ -155,7 +155,7 @@ import java.util.concurrent.Future;
frameProcessor.setAsynchronousErrorListener( frameProcessor.setAsynchronousErrorListener(
error -> error ->
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(error)))); () -> errorListener.onError(new VideoFrameProcessingException(error))));
} }
@Override @Override
@@ -191,7 +191,7 @@ import java.util.concurrent.Future;
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e))); () -> errorListener.onError(new VideoFrameProcessingException(e)));
} }
if (acceptedFrame) { if (acceptedFrame) {
inputListener.onInputFrameProcessed(inputTexture); inputListener.onInputFrameProcessed(inputTexture);
@@ -213,9 +213,7 @@ import java.util.concurrent.Future;
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
if (errorListener != null) { if (errorListener != null) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> () -> errorListener.onError(new VideoFrameProcessingException(e)));
errorListener.onFrameProcessingError(
new FrameProcessingException(e)));
} }
} }
} }
@@ -254,14 +252,12 @@ import java.util.concurrent.Future;
try { try {
if (!singleThreadExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS)) { if (!singleThreadExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS)) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> () -> errorListener.onError(new VideoFrameProcessingException("Release timed out")));
errorListener.onFrameProcessingError(
new FrameProcessingException("Release timed out")));
} }
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e))); () -> errorListener.onError(new VideoFrameProcessingException(e)));
} }
frameProcessor.close(); frameProcessor.close();
@@ -294,11 +290,11 @@ import java.util.concurrent.Future;
futures.remove().get(); futures.remove().get();
} catch (ExecutionException e) { } catch (ExecutionException e) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e))); () -> errorListener.onError(new VideoFrameProcessingException(e)));
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e))); () -> errorListener.onError(new VideoFrameProcessingException(e)));
} }
} }
} }

View File

@@ -230,10 +230,10 @@ public class PlaybackException extends Exception implements Bundleable {
// Frame processing errors (7xxx). // Frame processing errors (7xxx).
/** Caused by a failure when initializing a {@link FrameProcessor}. */ /** Caused by a failure when initializing a {@link VideoFrameProcessor}. */
@UnstableApi public static final int ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED = 7000; @UnstableApi public static final int ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED = 7000;
/** Caused by a failure when processing a frame. */ /** Caused by a failure when processing a video frame. */
@UnstableApi public static final int ERROR_CODE_FRAME_PROCESSING_FAILED = 7001; @UnstableApi public static final int ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED = 7001;
/** /**
* Player implementations that want to surface custom errors can use error codes greater than this * Player implementations that want to surface custom errors can use error codes greater than this
@@ -312,10 +312,10 @@ public class PlaybackException extends Exception implements Bundleable {
return "ERROR_CODE_DRM_DEVICE_REVOKED"; return "ERROR_CODE_DRM_DEVICE_REVOKED";
case ERROR_CODE_DRM_LICENSE_EXPIRED: case ERROR_CODE_DRM_LICENSE_EXPIRED:
return "ERROR_CODE_DRM_LICENSE_EXPIRED"; return "ERROR_CODE_DRM_LICENSE_EXPIRED";
case ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED: case ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED:
return "ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED"; return "ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED";
case ERROR_CODE_FRAME_PROCESSING_FAILED: case ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED:
return "ERROR_CODE_FRAME_PROCESSING_FAILED"; return "ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED";
default: default:
if (errorCode >= CUSTOM_ERROR_CODE_BASE) { if (errorCode >= CUSTOM_ERROR_CODE_BASE) {
return "custom error code"; return "custom error code";

View File

@@ -22,25 +22,26 @@ import androidx.media3.common.util.UnstableApi;
* to video frames. * to video frames.
*/ */
@UnstableApi @UnstableApi
public final class FrameProcessingException extends Exception { public final class VideoFrameProcessingException extends Exception {
/** /**
* Wraps the given exception in a {@code FrameProcessingException} if it is not already a {@code * Wraps the given exception in a {@code VideoFrameProcessingException} if it is not already a
* FrameProcessingException} and returns the exception otherwise. * {@code VideoFrameProcessingException} and returns the exception otherwise.
*/ */
public static FrameProcessingException from(Exception exception) { public static VideoFrameProcessingException from(Exception exception) {
return from(exception, /* presentationTimeUs= */ C.TIME_UNSET); return from(exception, /* presentationTimeUs= */ C.TIME_UNSET);
} }
/** /**
* Wraps the given exception in a {@code FrameProcessingException} with the given timestamp if it * Wraps the given exception in a {@code VideoFrameProcessingException} with the given timestamp
* is not already a {@code FrameProcessingException} and returns the exception otherwise. * if it is not already a {@code VideoFrameProcessingException} and returns the exception
* otherwise.
*/ */
public static FrameProcessingException from(Exception exception, long presentationTimeUs) { public static VideoFrameProcessingException from(Exception exception, long presentationTimeUs) {
if (exception instanceof FrameProcessingException) { if (exception instanceof VideoFrameProcessingException) {
return (FrameProcessingException) exception; return (VideoFrameProcessingException) exception;
} else { } else {
return new FrameProcessingException(exception, presentationTimeUs); return new VideoFrameProcessingException(exception, presentationTimeUs);
} }
} }
@@ -55,7 +56,7 @@ public final class FrameProcessingException extends Exception {
* *
* @param message The detail message for this exception. * @param message The detail message for this exception.
*/ */
public FrameProcessingException(String message) { public VideoFrameProcessingException(String message) {
this(message, /* presentationTimeUs= */ C.TIME_UNSET); this(message, /* presentationTimeUs= */ C.TIME_UNSET);
} }
@@ -65,7 +66,7 @@ public final class FrameProcessingException extends Exception {
* @param message The detail message for this exception. * @param message The detail message for this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred. * @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/ */
public FrameProcessingException(String message, long presentationTimeUs) { public VideoFrameProcessingException(String message, long presentationTimeUs) {
super(message); super(message);
this.presentationTimeUs = presentationTimeUs; this.presentationTimeUs = presentationTimeUs;
} }
@@ -76,7 +77,7 @@ public final class FrameProcessingException extends Exception {
* @param message The detail message for this exception. * @param message The detail message for this exception.
* @param cause The cause of this exception. * @param cause The cause of this exception.
*/ */
public FrameProcessingException(String message, Throwable cause) { public VideoFrameProcessingException(String message, Throwable cause) {
this(message, cause, /* presentationTimeUs= */ C.TIME_UNSET); this(message, cause, /* presentationTimeUs= */ C.TIME_UNSET);
} }
@@ -87,7 +88,7 @@ public final class FrameProcessingException extends Exception {
* @param cause The cause of this exception. * @param cause The cause of this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred. * @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/ */
public FrameProcessingException(String message, Throwable cause, long presentationTimeUs) { public VideoFrameProcessingException(String message, Throwable cause, long presentationTimeUs) {
super(message, cause); super(message, cause);
this.presentationTimeUs = presentationTimeUs; this.presentationTimeUs = presentationTimeUs;
} }
@@ -97,7 +98,7 @@ public final class FrameProcessingException extends Exception {
* *
* @param cause The cause of this exception. * @param cause The cause of this exception.
*/ */
public FrameProcessingException(Throwable cause) { public VideoFrameProcessingException(Throwable cause) {
this(cause, /* presentationTimeUs= */ C.TIME_UNSET); this(cause, /* presentationTimeUs= */ C.TIME_UNSET);
} }
@@ -107,7 +108,7 @@ public final class FrameProcessingException extends Exception {
* @param cause The cause of this exception. * @param cause The cause of this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred. * @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/ */
public FrameProcessingException(Throwable cause, long presentationTimeUs) { public VideoFrameProcessingException(Throwable cause, long presentationTimeUs) {
super(cause); super(cause);
this.presentationTimeUs = presentationTimeUs; this.presentationTimeUs = presentationTimeUs;
} }

View File

@@ -25,7 +25,7 @@ import java.util.List;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
/** /**
* Interface for a frame processor that applies changes to individual video frames. * Interface for a video frame processor that applies changes to individual video frames.
* *
* <p>The changes are specified by {@link Effect} instances passed to {@link Factory#create}. * <p>The changes are specified by {@link Effect} instances passed to {@link Factory#create}.
* *
@@ -37,13 +37,13 @@ import java.util.concurrent.Executor;
* to the input {@link Surface}. * to the input {@link Surface}.
*/ */
@UnstableApi @UnstableApi
public interface FrameProcessor { public interface VideoFrameProcessor {
// TODO(b/243036513): Allow effects to be replaced. // TODO(b/243036513): Allow effects to be replaced.
/** A factory for {@link FrameProcessor} instances. */ /** A factory for {@link VideoFrameProcessor} instances. */
interface Factory { interface Factory {
/** /**
* Creates a new {@link FrameProcessor} instance. * Creates a new {@link VideoFrameProcessor} instance.
* *
* @param context A {@link Context}. * @param context A {@link Context}.
* @param effects The {@link Effect} instances to apply to each frame. Applied on the {@code * @param effects The {@link Effect} instances to apply to each frame. Applied on the {@code
@@ -55,18 +55,18 @@ public interface FrameProcessor {
* video) or not (e.g. from a {@link Bitmap}). See <a * video) or not (e.g. from a {@link Bitmap}). See <a
* href="https://source.android.com/docs/core/graphics/arch-st#ext_texture">the * href="https://source.android.com/docs/core/graphics/arch-st#ext_texture">the
* SurfaceTexture docs</a> for more information on external textures. * SurfaceTexture docs</a> for more information on external textures.
* @param releaseFramesAutomatically If {@code true}, the {@link FrameProcessor} will render * @param releaseFramesAutomatically If {@code true}, the instance will render output frames to
* output frames to the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} * the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as
* automatically as {@link FrameProcessor} is done processing them. If {@code false}, the * {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link
* {@link FrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to * VideoFrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to
* render or drop the frame. * render or drop the frame.
* @param executor The {@link Executor} on which the {@code listener} is invoked. * @param executor The {@link Executor} on which the {@code listener} is invoked.
* @param listener A {@link Listener}. * @param listener A {@link Listener}.
* @return A new instance. * @return A new instance.
* @throws FrameProcessingException If a problem occurs while creating the {@link * @throws VideoFrameProcessingException If a problem occurs while creating the {@link
* FrameProcessor}. * VideoFrameProcessor}.
*/ */
FrameProcessor create( VideoFrameProcessor create(
Context context, Context context,
List<Effect> effects, List<Effect> effects,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
@@ -76,7 +76,7 @@ public interface FrameProcessor {
boolean releaseFramesAutomatically, boolean releaseFramesAutomatically,
Executor executor, Executor executor,
Listener listener) Listener listener)
throws FrameProcessingException; throws VideoFrameProcessingException;
} }
/** /**
@@ -106,15 +106,15 @@ public interface FrameProcessor {
void onOutputFrameAvailable(long presentationTimeUs); void onOutputFrameAvailable(long presentationTimeUs);
/** /**
* Called when an exception occurs during asynchronous frame processing. * Called when an exception occurs during asynchronous video frame processing.
* *
* <p>If an error occurred, consuming and producing further frames will not work as expected and * <p>If an error occurred, consuming and producing further frames will not work as expected and
* the {@link FrameProcessor} should be released. * the {@link VideoFrameProcessor} should be released.
*/ */
void onFrameProcessingError(FrameProcessingException exception); void onError(VideoFrameProcessingException exception);
/** Called after the {@link FrameProcessor} has produced its final output frame. */ /** Called after the {@link VideoFrameProcessor} has produced its final output frame. */
void onFrameProcessingEnded(); void onEnded();
} }
/** /**
@@ -127,14 +127,14 @@ public interface FrameProcessor {
long DROP_OUTPUT_FRAME = -2; long DROP_OUTPUT_FRAME = -2;
/** /**
* Provides an input {@link Bitmap} to the {@link FrameProcessor}. * Provides an input {@link Bitmap} to the {@code VideoFrameProcessor}.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code false}. * isInputTextureExternal} parameter is set to {@code false}.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
* *
* @param inputBitmap The {@link Bitmap} queued to the {@link FrameProcessor}. * @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds. * @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
* @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per * @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
* second. * second.
@@ -144,9 +144,10 @@ public interface FrameProcessor {
void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate); void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate);
/** /**
* Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from. * Returns the input {@link Surface}, where {@code VideoFrameProcessor} consumes input frames
* from.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}. * isInputTextureExternal} parameter is set to {@code true}.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
@@ -171,11 +172,11 @@ public interface FrameProcessor {
void setInputFrameInfo(FrameInfo inputFrameInfo); void setInputFrameInfo(FrameInfo inputFrameInfo);
/** /**
* Informs the {@code FrameProcessor} that a frame will be queued to its input surface. * Informs the {@code VideoFrameProcessor} that a frame will be queued to its input surface.
* *
* <p>Must be called before rendering a frame to the frame processor's input surface. * <p>Must be called before rendering a frame to the {@code VideoFrameProcessor}'s input surface.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}. * isInputTextureExternal} parameter is set to {@code true}.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
@@ -189,7 +190,7 @@ public interface FrameProcessor {
* Returns the number of input frames that have been {@linkplain #registerInputFrame() registered} * Returns the number of input frames that have been {@linkplain #registerInputFrame() registered}
* but not processed off the {@linkplain #getInputSurface() input surface} yet. * but not processed off the {@linkplain #getInputSurface() input surface} yet.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}. * isInputTextureExternal} parameter is set to {@code true}.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
@@ -201,7 +202,7 @@ public interface FrameProcessor {
* dropped, they will be rendered to this output {@link SurfaceInfo}. * dropped, they will be rendered to this output {@link SurfaceInfo}.
* *
* <p>The new output {@link SurfaceInfo} is applied from the next output frame rendered onwards. * <p>The new output {@link SurfaceInfo} is applied from the next output frame rendered onwards.
* If the output {@link SurfaceInfo} is {@code null}, the {@code FrameProcessor} will stop * If the output {@link SurfaceInfo} is {@code null}, the {@code VideoFrameProcessor} will stop
* rendering pending frames and resume rendering once a non-null {@link SurfaceInfo} is set. * rendering pending frames and resume rendering once a non-null {@link SurfaceInfo} is set.
* *
* <p>If the dimensions given in {@link SurfaceInfo} do not match the {@linkplain * <p>If the dimensions given in {@link SurfaceInfo} do not match the {@linkplain
@@ -235,7 +236,7 @@ public interface FrameProcessor {
void releaseOutputFrame(long releaseTimeNs); void releaseOutputFrame(long releaseTimeNs);
/** /**
* Informs the {@code FrameProcessor} that no further input frames should be accepted. * Informs the {@code VideoFrameProcessor} that no further input frames should be accepted.
* *
* <p>Can be called on any thread. * <p>Can be called on any thread.
* *
@@ -244,12 +245,12 @@ public interface FrameProcessor {
void signalEndOfInput(); void signalEndOfInput();
/** /**
* Flushes the {@code FrameProcessor}. * Flushes the {@code VideoFrameProcessor}.
* *
* <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this * <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this
* method are no longer considered to be registered when this method returns. * method are no longer considered to be registered when this method returns.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}. * isInputTextureExternal} parameter is set to {@code true}.
* *
* <p>{@link Listener} methods invoked prior to calling this method should be ignored. * <p>{@link Listener} methods invoked prior to calling this method should be ignored.
@@ -259,10 +260,9 @@ public interface FrameProcessor {
/** /**
* Releases all resources. * Releases all resources.
* *
* <p>If the frame processor is released before it has {@linkplain * <p>If the {@code VideoFrameProcessor} is released before it has {@linkplain Listener#onEnded()
* Listener#onFrameProcessingEnded() ended}, it will attempt to cancel processing any input frames * ended}, it will attempt to cancel processing any input frames that have already become
* that have already become available. Input frames that become available after release are * available. Input frames that become available after release are ignored.
* ignored.
* *
* <p>This method blocks until all resources are released or releasing times out. * <p>This method blocks until all resources are released or releasing times out.
* *

View File

@@ -33,7 +33,7 @@ import android.graphics.Color;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
@@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public class ContrastPixelTest { public class ContrastPixelTest {
@@ -89,7 +89,7 @@ public class ContrastPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (contrastShaderProgram != null) { if (contrastShaderProgram != null) {
contrastShaderProgram.release(); contrastShaderProgram.release();
} }
@@ -198,7 +198,7 @@ public class ContrastPixelTest {
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE); assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
} }
private void setupOutputTexture(int outputWidth, int outputHeight) throws GlUtil.GlException { private void setupOutputTexture(int outputWidth, int outputHeight) throws Exception {
int outputTexId = int outputTexId =
GlUtil.createTexture( GlUtil.createTexture(
outputWidth, outputHeight, /* useHighPrecisionColorComponents= */ false); outputWidth, outputHeight, /* useHighPrecisionColorComponents= */ false);

View File

@@ -30,7 +30,7 @@ import android.graphics.Bitmap;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
@@ -48,7 +48,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class CropPixelTest { public final class CropPixelTest {
@@ -82,7 +82,7 @@ public final class CropPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (cropShaderProgram != null) { if (cropShaderProgram != null) {
cropShaderProgram.release(); cropShaderProgram.release();
} }

View File

@@ -27,10 +27,10 @@ import android.content.Context;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.graphics.Matrix; import android.graphics.Matrix;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.FrameProcessorTestRunner; import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -39,10 +39,10 @@ import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
/** /**
* Pixel test for frame processing via {@link GlEffectsFrameProcessor}. * Pixel test for video frame processing via {@link DefaultVideoFrameProcessor}.
* *
* <p>Uses a {@link GlEffectsFrameProcessor} to process one frame, and checks that the actual output * <p>Uses a {@link DefaultVideoFrameProcessor} to process one frame, and checks that the actual
* matches expected output, either from a golden file or from another edit. * output matches expected output, either from a golden file or from another edit.
* *
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* bitmaps. * bitmaps.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class GlEffectsFrameProcessorPixelTest { public final class DefaultVideoFrameProcessorPixelTest {
public static final String ORIGINAL_PNG_ASSET_PATH = public static final String ORIGINAL_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/original.png"; "media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
public static final String WRAPPED_CROP_PNG_ASSET_PATH = public static final String WRAPPED_CROP_PNG_ASSET_PATH =
@ -81,20 +81,20 @@ public final class GlEffectsFrameProcessorPixelTest {
/** Input video of which we only use the first frame. */ /** Input video of which we only use the first frame. */
private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4"; private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4";
private @MonotonicNonNull FrameProcessorTestRunner frameProcessorTestRunner; private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
@After @After
public void release() { public void release() {
checkNotNull(frameProcessorTestRunner).release(); checkNotNull(videoFrameProcessorTestRunner).release();
} }
@Test @Test
public void noEffects_matchesGoldenFile() throws Exception { public void noEffects_matchesGoldenFile() throws Exception {
String testId = "noEffects_matchesGoldenFile"; String testId = "noEffects_matchesGoldenFile";
frameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build(); videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -105,11 +105,11 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void noEffects_withImageInput_matchesGoldenFile() throws Exception { public void noEffects_withImageInput_matchesGoldenFile() throws Exception {
String testId = "noEffects_withImageInput_matchesGoldenFile"; String testId = "noEffects_withImageInput_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId).setIsInputTextureExternal(false).build(); getDefaultFrameProcessorTestRunnerBuilder(testId).setIsInputTextureExternal(false).build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processImageFrameAndEnd(expectedBitmap); Bitmap actualBitmap = videoFrameProcessorTestRunner.processImageFrameAndEnd(expectedBitmap);
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -120,7 +120,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void wrappedCrop_withImageInput_matchesGoldenFile() throws Exception { public void wrappedCrop_withImageInput_matchesGoldenFile() throws Exception {
String testId = "wrappedCrop_withImageInput_matchesGoldenFile"; String testId = "wrappedCrop_withImageInput_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setIsInputTextureExternal(false) .setIsInputTextureExternal(false)
.setEffects( .setEffects(
@ -134,7 +134,7 @@ public final class GlEffectsFrameProcessorPixelTest {
Bitmap originalBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH); Bitmap originalBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap expectedBitmap = readBitmap(WRAPPED_CROP_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(WRAPPED_CROP_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processImageFrameAndEnd(originalBitmap); Bitmap actualBitmap = videoFrameProcessorTestRunner.processImageFrameAndEnd(originalBitmap);
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -148,13 +148,13 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void noEffects_withFrameCache_matchesGoldenFile() throws Exception { public void noEffects_withFrameCache_matchesGoldenFile() throws Exception {
String testId = "noEffects_withFrameCache_matchesGoldenFile"; String testId = "noEffects_withFrameCache_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(new FrameCache(/* capacity= */ 5)) .setEffects(new FrameCache(/* capacity= */ 5))
.build(); .build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -165,11 +165,11 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void setPixelWidthHeightRatio_matchesGoldenFile() throws Exception { public void setPixelWidthHeightRatio_matchesGoldenFile() throws Exception {
String testId = "setPixelWidthHeightRatio_matchesGoldenFile"; String testId = "setPixelWidthHeightRatio_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId).setPixelWidthHeightRatio(2f).build(); getDefaultFrameProcessorTestRunnerBuilder(testId).setPixelWidthHeightRatio(2f).build();
Bitmap expectedBitmap = readBitmap(SCALE_WIDE_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(SCALE_WIDE_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -182,13 +182,13 @@ public final class GlEffectsFrameProcessorPixelTest {
String testId = "matrixTransformation_matchesGoldenFile"; String testId = "matrixTransformation_matchesGoldenFile";
Matrix translateRightMatrix = new Matrix(); Matrix translateRightMatrix = new Matrix();
translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0); translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects((MatrixTransformation) (long presentationTimeNs) -> translateRightMatrix) .setEffects((MatrixTransformation) (long presentationTimeNs) -> translateRightMatrix)
.build(); .build();
Bitmap expectedBitmap = readBitmap(TRANSLATE_RIGHT_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(TRANSLATE_RIGHT_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -201,7 +201,7 @@ public final class GlEffectsFrameProcessorPixelTest {
String testId = "matrixAndScaleToFitTransformation_matchesGoldenFile"; String testId = "matrixAndScaleToFitTransformation_matchesGoldenFile";
Matrix translateRightMatrix = new Matrix(); Matrix translateRightMatrix = new Matrix();
translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0); translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects( .setEffects(
(MatrixTransformation) (long presentationTimeUs) -> translateRightMatrix, (MatrixTransformation) (long presentationTimeUs) -> translateRightMatrix,
@ -209,7 +209,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build(); .build();
Bitmap expectedBitmap = readBitmap(TRANSLATE_THEN_ROTATE_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(TRANSLATE_THEN_ROTATE_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -222,13 +222,13 @@ public final class GlEffectsFrameProcessorPixelTest {
String testId = "bitmapOverlay_matchesGoldenFile"; String testId = "bitmapOverlay_matchesGoldenFile";
Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH); Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap); BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(new OverlayEffect(ImmutableList.of(bitmapOverlay))) .setEffects(new OverlayEffect(ImmutableList.of(bitmapOverlay)))
.build(); .build();
Bitmap expectedBitmap = readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -241,7 +241,7 @@ public final class GlEffectsFrameProcessorPixelTest {
String testId = "scaleToFitAndMatrixTransformation_matchesGoldenFile"; String testId = "scaleToFitAndMatrixTransformation_matchesGoldenFile";
Matrix translateRightMatrix = new Matrix(); Matrix translateRightMatrix = new Matrix();
translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0); translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects( .setEffects(
new ScaleToFitTransformation.Builder().setRotationDegrees(45).build(), new ScaleToFitTransformation.Builder().setRotationDegrees(45).build(),
@ -249,7 +249,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build(); .build();
Bitmap expectedBitmap = readBitmap(ROTATE_THEN_TRANSLATE_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(ROTATE_THEN_TRANSLATE_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -260,13 +260,13 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void presentation_createForHeight_matchesGoldenFile() throws Exception { public void presentation_createForHeight_matchesGoldenFile() throws Exception {
String testId = "presentation_createForHeight_matchesGoldenFile"; String testId = "presentation_createForHeight_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(Presentation.createForHeight(480)) .setEffects(Presentation.createForHeight(480))
.build(); .build();
Bitmap expectedBitmap = readBitmap(REQUEST_OUTPUT_HEIGHT_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(REQUEST_OUTPUT_HEIGHT_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -277,7 +277,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void cropThenPresentation_matchesGoldenFile() throws Exception { public void cropThenPresentation_matchesGoldenFile() throws Exception {
String testId = "cropThenPresentation_matchesGoldenFile"; String testId = "cropThenPresentation_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects( .setEffects(
new Crop( new Crop(
@ -287,7 +287,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build(); .build();
Bitmap expectedBitmap = readBitmap(CROP_THEN_ASPECT_RATIO_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(CROP_THEN_ASPECT_RATIO_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -298,13 +298,13 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void scaleToFitTransformation_rotate45_matchesGoldenFile() throws Exception { public void scaleToFitTransformation_rotate45_matchesGoldenFile() throws Exception {
String testId = "scaleToFitTransformation_rotate45_matchesGoldenFile"; String testId = "scaleToFitTransformation_rotate45_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(new ScaleToFitTransformation.Builder().setRotationDegrees(45).build()) .setEffects(new ScaleToFitTransformation.Builder().setRotationDegrees(45).build())
.build(); .build();
Bitmap expectedBitmap = readBitmap(ROTATE45_SCALE_TO_FIT_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(ROTATE45_SCALE_TO_FIT_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -315,7 +315,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void twoWrappedScaleToFitTransformations_matchesGoldenFile() throws Exception { public void twoWrappedScaleToFitTransformations_matchesGoldenFile() throws Exception {
String testId = "twoWrappedScaleToFitTransformations_matchesGoldenFile"; String testId = "twoWrappedScaleToFitTransformations_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects( .setEffects(
new GlEffectWrapper( new GlEffectWrapper(
@ -327,7 +327,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build(); .build();
Bitmap expectedBitmap = readBitmap(ROTATE_THEN_SCALE_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(ROTATE_THEN_SCALE_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -346,20 +346,20 @@ public final class GlEffectsFrameProcessorPixelTest {
} }
full10StepRotationAndCenterCrop.add(centerCrop); full10StepRotationAndCenterCrop.add(centerCrop);
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("centerCrop") .setOutputFileLabel("centerCrop")
.setEffects(centerCrop) .setEffects(centerCrop)
.build(); .build();
Bitmap centerCropResultBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap centerCropResultBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
frameProcessorTestRunner.release(); videoFrameProcessorTestRunner.release();
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("full10StepRotationAndCenterCrop") .setOutputFileLabel("full10StepRotationAndCenterCrop")
.setEffects(full10StepRotationAndCenterCrop.build()) .setEffects(full10StepRotationAndCenterCrop.build())
.build(); .build();
Bitmap fullRotationAndCenterCropResultBitmap = Bitmap fullRotationAndCenterCropResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd(); videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -371,11 +371,11 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void increaseBrightness_matchesGoldenFile() throws Exception { public void increaseBrightness_matchesGoldenFile() throws Exception {
String testId = "increaseBrightness_matchesGoldenFile"; String testId = "increaseBrightness_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId).setEffects(new Brightness(0.5f)).build(); getDefaultFrameProcessorTestRunnerBuilder(testId).setEffects(new Brightness(0.5f)).build();
Bitmap expectedBitmap = readBitmap(INCREASE_BRIGHTNESS_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(INCREASE_BRIGHTNESS_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -399,7 +399,7 @@ public final class GlEffectsFrameProcessorPixelTest {
new RgbAdjustment.Builder().setBlueScale(5).build(), new RgbAdjustment.Builder().setBlueScale(5).build(),
new Rotation(/* degrees= */ 90), new Rotation(/* degrees= */ 90),
centerCrop); centerCrop);
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("centerCrop") .setOutputFileLabel("centerCrop")
.setEffects( .setEffects(
@ -407,16 +407,16 @@ public final class GlEffectsFrameProcessorPixelTest {
centerCrop) centerCrop)
.build(); .build();
Bitmap centerCropAndBrightnessIncreaseResultBitmap = Bitmap centerCropAndBrightnessIncreaseResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd(); videoFrameProcessorTestRunner.processFirstFrameAndEnd();
frameProcessorTestRunner.release(); videoFrameProcessorTestRunner.release();
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("full4StepRotationBrightnessIncreaseAndCenterCrop") .setOutputFileLabel("full4StepRotationBrightnessIncreaseAndCenterCrop")
.setEffects(increaseBrightnessFullRotationCenterCrop) .setEffects(increaseBrightnessFullRotationCenterCrop)
.build(); .build();
Bitmap fullRotationBrightnessIncreaseAndCenterCropResultBitmap = Bitmap fullRotationBrightnessIncreaseAndCenterCropResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd(); videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -446,7 +446,7 @@ public final class GlEffectsFrameProcessorPixelTest {
new Rotation(/* degrees= */ 90), new Rotation(/* degrees= */ 90),
new FrameCache(/* capacity= */ 2), new FrameCache(/* capacity= */ 2),
centerCrop); centerCrop);
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("centerCrop") .setOutputFileLabel("centerCrop")
.setEffects( .setEffects(
@ -454,16 +454,16 @@ public final class GlEffectsFrameProcessorPixelTest {
centerCrop) centerCrop)
.build(); .build();
Bitmap centerCropAndBrightnessIncreaseResultBitmap = Bitmap centerCropAndBrightnessIncreaseResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd(); videoFrameProcessorTestRunner.processFirstFrameAndEnd();
frameProcessorTestRunner.release(); videoFrameProcessorTestRunner.release();
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("full4StepRotationBrightnessIncreaseAndCenterCrop") .setOutputFileLabel("full4StepRotationBrightnessIncreaseAndCenterCrop")
.setEffects(increaseBrightnessFullRotationCenterCrop) .setEffects(increaseBrightnessFullRotationCenterCrop)
.build(); .build();
Bitmap fullRotationBrightnessIncreaseAndCenterCropResultBitmap = Bitmap fullRotationBrightnessIncreaseAndCenterCropResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd(); videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -477,7 +477,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test @Test
public void grayscaleThenIncreaseRedChannel_matchesGoldenFile() throws Exception { public void grayscaleThenIncreaseRedChannel_matchesGoldenFile() throws Exception {
String testId = "grayscaleThenIncreaseRedChannel_matchesGoldenFile"; String testId = "grayscaleThenIncreaseRedChannel_matchesGoldenFile";
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects( .setEffects(
RgbFilter.createGrayscaleFilter(), RgbFilter.createGrayscaleFilter(),
@ -485,7 +485,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build(); .build();
Bitmap expectedBitmap = readBitmap(GRAYSCALE_THEN_INCREASE_RED_CHANNEL_PNG_ASSET_PATH); Bitmap expectedBitmap = readBitmap(GRAYSCALE_THEN_INCREASE_RED_CHANNEL_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data. // TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference = float averagePixelAbsoluteDifference =
@ -496,11 +496,11 @@ public final class GlEffectsFrameProcessorPixelTest {
// TODO(b/227624622): Add a test for HDR input after BitmapPixelTestUtil can read HDR bitmaps, // TODO(b/227624622): Add a test for HDR input after BitmapPixelTestUtil can read HDR bitmaps,
// using GlEffectWrapper to ensure usage of intermediate textures. // using GlEffectWrapper to ensure usage of intermediate textures.
private FrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder( private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) { String testId) {
return new FrameProcessorTestRunner.Builder() return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId) .setTestId(testId)
.setFrameProcessorFactory(new GlEffectsFrameProcessor.Factory()) .setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory())
.setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING); .setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING);
} }
@ -538,10 +538,10 @@ public final class GlEffectsFrameProcessorPixelTest {
} }
/** /**
* Wraps a {@link GlEffect} to prevent the {@link GlEffectsFrameProcessor} from detecting its * Wraps a {@link GlEffect} to prevent the {@link DefaultVideoFrameProcessor} from detecting its
* class and optimizing it. * class and optimizing it.
* *
* <p>This ensures that {@link GlEffectsFrameProcessor} uses a separate {@link GlShaderProgram} * <p>This ensures that {@link DefaultVideoFrameProcessor} uses a separate {@link GlShaderProgram}
* for the wrapped {@link GlEffect} rather than merging it with preceding or subsequent {@link * for the wrapped {@link GlEffect} rather than merging it with preceding or subsequent {@link
* GlEffect} instances and applying them in one combined {@link GlShaderProgram}. * GlEffect} instances and applying them in one combined {@link GlShaderProgram}.
*/ */
@ -555,7 +555,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Override @Override
public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return effect.toGlShaderProgram(context, useHdr); return effect.toGlShaderProgram(context, useHdr);
} }
} }

View File

@ -27,9 +27,9 @@ import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.FrameInfo; import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
@ -50,9 +50,9 @@ import org.junit.After;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
/** Tests for frame release in {@link GlEffectsFrameProcessor}. */ /** Tests for frame release in {@link DefaultVideoFrameProcessor}. */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class GlEffectsFrameProcessorFrameReleaseTest { public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
private static final int WIDTH = 200; private static final int WIDTH = 200;
private static final int HEIGHT = 100; private static final int HEIGHT = 100;
@ -68,12 +68,12 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
private final LinkedBlockingQueue<Long> outputReleaseTimesNs = new LinkedBlockingQueue<>(); private final LinkedBlockingQueue<Long> outputReleaseTimesNs = new LinkedBlockingQueue<>();
private @MonotonicNonNull GlEffectsFrameProcessor glEffectsFrameProcessor; private @MonotonicNonNull DefaultVideoFrameProcessor defaultVideoFrameProcessor;
@After @After
public void release() { public void release() {
if (glEffectsFrameProcessor != null) { if (defaultVideoFrameProcessor != null) {
glEffectsFrameProcessor.release(); defaultVideoFrameProcessor.release();
} }
} }
@ -136,7 +136,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs}, /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeUs -> { /* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimeUs.set(presentationTimeUs); actualPresentationTimeUs.set(presentationTimeUs);
checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimesNs); checkNotNull(defaultVideoFrameProcessor).releaseOutputFrame(releaseTimesNs);
}, },
/* releaseFramesAutomatically= */ false); /* releaseFramesAutomatically= */ false);
@ -149,18 +149,18 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
public void controlledFrameRelease_withOneFrameRequestImmediateRelease_releasesFrame() public void controlledFrameRelease_withOneFrameRequestImmediateRelease_releasesFrame()
throws Exception { throws Exception {
long originalPresentationTimeUs = 1234; long originalPresentationTimeUs = 1234;
long releaseTimesNs = FrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY; long releaseTimesNs = VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY;
AtomicLong actualPresentationTimeUs = new AtomicLong(); AtomicLong actualPresentationTimeUs = new AtomicLong();
processFramesToEndOfStream( processFramesToEndOfStream(
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs}, /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeUs -> { /* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimeUs.set(presentationTimeUs); actualPresentationTimeUs.set(presentationTimeUs);
checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimesNs); checkNotNull(defaultVideoFrameProcessor).releaseOutputFrame(releaseTimesNs);
}, },
/* releaseFramesAutomatically= */ false); /* releaseFramesAutomatically= */ false);
assertThat(actualPresentationTimeUs.get()).isEqualTo(originalPresentationTimeUs); assertThat(actualPresentationTimeUs.get()).isEqualTo(originalPresentationTimeUs);
// The actual release time is determined by the FrameProcessor when releasing the frame. // The actual release time is determined by the VideoFrameProcessor when releasing the frame.
ImmutableList<Long> actualReleaseTimesNs = ImmutableList<Long> actualReleaseTimesNs =
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1); waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualReleaseTimesNs).hasSize(1); assertThat(actualReleaseTimesNs).hasSize(1);
@ -175,14 +175,15 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs}, /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeUs -> { /* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimeUs.set(presentationTimeUs); actualPresentationTimeUs.set(presentationTimeUs);
checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimeBeforeCurrentTimeNs); checkNotNull(defaultVideoFrameProcessor)
.releaseOutputFrame(releaseTimeBeforeCurrentTimeNs);
}, },
/* releaseFramesAutomatically= */ false); /* releaseFramesAutomatically= */ false);
ImmutableList<Long> actualReleaseTimesNs = ImmutableList<Long> actualReleaseTimesNs =
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1); waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualReleaseTimesNs).hasSize(1); assertThat(actualReleaseTimesNs).hasSize(1);
// The actual release time is determined by the FrameProcessor when releasing the frame. // The actual release time is determined by the VideoFrameProcessor when releasing the frame.
assertThat(actualReleaseTimesNs.get(0)).isAtLeast(releaseTimeBeforeCurrentTimeNs); assertThat(actualReleaseTimesNs.get(0)).isAtLeast(releaseTimeBeforeCurrentTimeNs);
} }
@ -194,8 +195,8 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs}, /* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeNs -> { /* onFrameAvailableListener= */ presentationTimeNs -> {
actualPresentationTimeUs.set(presentationTimeNs); actualPresentationTimeUs.set(presentationTimeNs);
checkNotNull(glEffectsFrameProcessor) checkNotNull(defaultVideoFrameProcessor)
.releaseOutputFrame(FrameProcessor.DROP_OUTPUT_FRAME); .releaseOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
}, },
/* releaseFramesAutomatically= */ false); /* releaseFramesAutomatically= */ false);
@ -214,7 +215,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* inputPresentationTimesUs= */ originalPresentationTimesUs, /* inputPresentationTimesUs= */ originalPresentationTimesUs,
/* onFrameAvailableListener= */ presentationTimeUs -> { /* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimesUs.add(presentationTimeUs); actualPresentationTimesUs.add(presentationTimeUs);
checkNotNull(glEffectsFrameProcessor) checkNotNull(defaultVideoFrameProcessor)
.releaseOutputFrame(releaseTimesNs[frameIndex.getAndIncrement()]); .releaseOutputFrame(releaseTimesNs[frameIndex.getAndIncrement()]);
try { try {
// TODO(b/264252759): Investigate output frames being dropped and remove sleep. // TODO(b/264252759): Investigate output frames being dropped and remove sleep.
@ -254,11 +255,11 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
// TODO(b/264252759): Investigate output frames being dropped and remove sleep. // TODO(b/264252759): Investigate output frames being dropped and remove sleep.
// Frames can be dropped silently between EGL and the ImageReader. Sleep after each call // Frames can be dropped silently between EGL and the ImageReader. Sleep after each call
// to swap buffers, to avoid this behavior. // to swap buffers, to avoid this behavior.
glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[0]); defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[0]);
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS); Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[1]); defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[1]);
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS); Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[2]); defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[2]);
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS); Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
assertThat(actualPresentationTimesUs) assertThat(actualPresentationTimesUs)
@ -276,19 +277,19 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
void onFrameAvailable(long presentationTimeUs); void onFrameAvailable(long presentationTimeUs);
} }
@EnsuresNonNull("glEffectsFrameProcessor") @EnsuresNonNull("defaultVideoFrameProcessor")
private void processFramesToEndOfStream( private void processFramesToEndOfStream(
long[] inputPresentationTimesUs, long[] inputPresentationTimesUs,
OnFrameAvailableListener onFrameAvailableListener, OnFrameAvailableListener onFrameAvailableListener,
boolean releaseFramesAutomatically) boolean releaseFramesAutomatically)
throws Exception { throws Exception {
AtomicReference<@NullableType FrameProcessingException> frameProcessingExceptionReference = AtomicReference<@NullableType VideoFrameProcessingException>
new AtomicReference<>(); videoFrameProcessingExceptionReference = new AtomicReference<>();
BlankFrameProducer blankFrameProducer = new BlankFrameProducer(); BlankFrameProducer blankFrameProducer = new BlankFrameProducer();
CountDownLatch frameProcessingEndedCountDownLatch = new CountDownLatch(1); CountDownLatch videoFrameProcessingEndedCountDownLatch = new CountDownLatch(1);
glEffectsFrameProcessor = defaultVideoFrameProcessor =
checkNotNull( checkNotNull(
new GlEffectsFrameProcessor.Factory() new DefaultVideoFrameProcessor.Factory()
.create( .create(
getApplicationContext(), getApplicationContext(),
ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer), ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer),
@ -298,7 +299,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* isInputTextureExternal= */ true, /* isInputTextureExternal= */ true,
releaseFramesAutomatically, releaseFramesAutomatically,
MoreExecutors.directExecutor(), MoreExecutors.directExecutor(),
new FrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
@Override @Override
public void onOutputSizeChanged(int width, int height) { public void onOutputSizeChanged(int width, int height) {
ImageReader outputImageReader = ImageReader outputImageReader =
@ -307,7 +308,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
height, height,
PixelFormat.RGBA_8888, PixelFormat.RGBA_8888,
/* maxImages= */ inputPresentationTimesUs.length); /* maxImages= */ inputPresentationTimesUs.length);
checkNotNull(glEffectsFrameProcessor) checkNotNull(defaultVideoFrameProcessor)
.setOutputSurfaceInfo( .setOutputSurfaceInfo(
new SurfaceInfo(outputImageReader.getSurface(), width, height)); new SurfaceInfo(outputImageReader.getSurface(), width, height));
outputImageReader.setOnImageAvailableListener( outputImageReader.setOnImageAvailableListener(
@ -325,34 +326,35 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
} }
@Override @Override
public void onFrameProcessingError(FrameProcessingException exception) { public void onError(VideoFrameProcessingException exception) {
frameProcessingExceptionReference.set(exception); videoFrameProcessingExceptionReference.set(exception);
frameProcessingEndedCountDownLatch.countDown(); videoFrameProcessingEndedCountDownLatch.countDown();
} }
@Override @Override
public void onFrameProcessingEnded() { public void onEnded() {
frameProcessingEndedCountDownLatch.countDown(); videoFrameProcessingEndedCountDownLatch.countDown();
} }
})); }));
glEffectsFrameProcessor defaultVideoFrameProcessor
.getTaskExecutor() .getTaskExecutor()
.submit( .submit(
() -> { () -> {
blankFrameProducer.configureGlObjects(); blankFrameProducer.configureGlObjects();
checkNotNull(glEffectsFrameProcessor) checkNotNull(defaultVideoFrameProcessor)
.setInputFrameInfo(new FrameInfo.Builder(WIDTH, HEIGHT).build()); .setInputFrameInfo(new FrameInfo.Builder(WIDTH, HEIGHT).build());
// A frame needs to be registered despite not queuing any external input to ensure // A frame needs to be registered despite not queuing any external input to ensure
// that // that
// the frame processor knows about the stream offset. // the video frame processor knows about the stream offset.
glEffectsFrameProcessor.registerInputFrame(); defaultVideoFrameProcessor.registerInputFrame();
blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs); blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs);
}); });
frameProcessingEndedCountDownLatch.await(); videoFrameProcessingEndedCountDownLatch.await();
@Nullable Exception frameProcessingException = frameProcessingExceptionReference.get(); @Nullable
if (frameProcessingException != null) { Exception videoFrameProcessingException = videoFrameProcessingExceptionReference.get();
throw frameProcessingException; if (videoFrameProcessingException != null) {
throw videoFrameProcessingException;
} }
} }
@ -374,7 +376,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
private @MonotonicNonNull TextureInfo blankTexture; private @MonotonicNonNull TextureInfo blankTexture;
private @MonotonicNonNull OutputListener outputListener; private @MonotonicNonNull OutputListener outputListener;
public void configureGlObjects() throws FrameProcessingException { public void configureGlObjects() throws VideoFrameProcessingException {
try { try {
int texId = int texId =
GlUtil.createTexture(WIDTH, HEIGHT, /* useHighPrecisionColorComponents= */ false); GlUtil.createTexture(WIDTH, HEIGHT, /* useHighPrecisionColorComponents= */ false);
@ -383,7 +385,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
GlUtil.focusFramebufferUsingCurrentContext(fboId, WIDTH, HEIGHT); GlUtil.focusFramebufferUsingCurrentContext(fboId, WIDTH, HEIGHT);
GlUtil.clearOutputFrame(); GlUtil.clearOutputFrame();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }

View File

@ -32,7 +32,7 @@ import android.graphics.Color;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class HslAdjustmentPixelTest { public final class HslAdjustmentPixelTest {
@ -100,7 +100,7 @@ public final class HslAdjustmentPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (hslProcessor != null) { if (hslProcessor != null) {
hslProcessor.release(); hslProcessor.release();
} }

View File

@ -30,7 +30,7 @@ import android.graphics.Matrix;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
@ -47,7 +47,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class MatrixShaderProgramPixelTest { public final class MatrixShaderProgramPixelTest {
@ -87,7 +87,7 @@ public final class MatrixShaderProgramPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) { if (matrixShaderProgram != null) {
matrixShaderProgram.release(); matrixShaderProgram.release();
} }

View File

@ -35,7 +35,7 @@ import android.opengl.Matrix;
import android.text.Spannable; import android.text.Spannable;
import android.text.SpannableString; import android.text.SpannableString;
import android.text.style.ForegroundColorSpan; import android.text.style.ForegroundColorSpan;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
@ -54,7 +54,7 @@ import org.junit.runner.RunWith;
* <p>Expected bitmaps are taken from an emulator, so tests on different emulators or physical * <p>Expected bitmaps are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public class OverlayShaderProgramPixelTest { public class OverlayShaderProgramPixelTest {
@ -101,7 +101,7 @@ public class OverlayShaderProgramPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (overlayShaderProgram != null) { if (overlayShaderProgram != null) {
overlayShaderProgram.release(); overlayShaderProgram.release();
} }

View File

@ -31,7 +31,7 @@ import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class PresentationPixelTest { public final class PresentationPixelTest {
@ -91,7 +91,7 @@ public final class PresentationPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (presentationShaderProgram != null) { if (presentationShaderProgram != null) {
presentationShaderProgram.release(); presentationShaderProgram.release();
} }

View File

@ -33,7 +33,7 @@ import android.graphics.Color;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
@ -52,7 +52,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class RgbAdjustmentPixelTest { public final class RgbAdjustmentPixelTest {
@ -99,7 +99,7 @@ public final class RgbAdjustmentPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) { if (matrixShaderProgram != null) {
matrixShaderProgram.release(); matrixShaderProgram.release();
} }

View File

@ -31,7 +31,7 @@ import android.graphics.Bitmap;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public final class RgbFilterPixelTest { public final class RgbFilterPixelTest {
@ -94,7 +94,7 @@ public final class RgbFilterPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) { if (matrixShaderProgram != null) {
matrixShaderProgram.release(); matrixShaderProgram.release();
} }

View File

@ -32,7 +32,7 @@ import android.graphics.Color;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
import android.opengl.EGLSurface; import android.opengl.EGLSurface;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.test.utils.BitmapPixelTestUtil; import androidx.media3.test.utils.BitmapPixelTestUtil;
@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical * <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link * devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output * BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}. * bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public class SingleColorLutPixelTest { public class SingleColorLutPixelTest {
@ -88,7 +88,7 @@ public class SingleColorLutPixelTest {
} }
@After @After
public void release() throws GlUtil.GlException, FrameProcessingException { public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (colorLutShaderProgram != null) { if (colorLutShaderProgram != null) {
colorLutShaderProgram.release(); colorLutShaderProgram.release();
} }

View File

@ -21,7 +21,7 @@ import android.graphics.Bitmap;
import android.net.Uri; import android.net.Uri;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLUtils; import android.opengl.GLUtils;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.BitmapLoader; import androidx.media3.common.util.BitmapLoader;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@ -44,9 +44,9 @@ public abstract class BitmapOverlay extends TextureOverlay {
* Returns the overlay bitmap displayed at the specified timestamp. * Returns the overlay bitmap displayed at the specified timestamp.
* *
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds. * @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame. * @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/ */
public abstract Bitmap getBitmap(long presentationTimeUs) throws FrameProcessingException; public abstract Bitmap getBitmap(long presentationTimeUs) throws VideoFrameProcessingException;
/** /**
* {@inheritDoc} * {@inheritDoc}
@ -61,7 +61,7 @@ public abstract class BitmapOverlay extends TextureOverlay {
} }
@Override @Override
public int getTextureId(long presentationTimeUs) throws FrameProcessingException { public int getTextureId(long presentationTimeUs) throws VideoFrameProcessingException {
Bitmap bitmap = getBitmap(presentationTimeUs); Bitmap bitmap = getBitmap(presentationTimeUs);
if (bitmap != lastBitmap) { if (bitmap != lastBitmap) {
try { try {
@ -79,7 +79,7 @@ public abstract class BitmapOverlay extends TextureOverlay {
/* border= */ 0); /* border= */ 0);
GlUtil.checkGlError(); GlUtil.checkGlError();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
return lastTextureId; return lastTextureId;
@ -134,14 +134,14 @@ public abstract class BitmapOverlay extends TextureOverlay {
private @MonotonicNonNull Bitmap lastBitmap; private @MonotonicNonNull Bitmap lastBitmap;
@Override @Override
public Bitmap getBitmap(long presentationTimeUs) throws FrameProcessingException { public Bitmap getBitmap(long presentationTimeUs) throws VideoFrameProcessingException {
if (lastBitmap == null) { if (lastBitmap == null) {
BitmapLoader bitmapLoader = new SimpleBitmapLoader(); BitmapLoader bitmapLoader = new SimpleBitmapLoader();
ListenableFuture<Bitmap> future = bitmapLoader.loadBitmap(overlayBitmapUri); ListenableFuture<Bitmap> future = bitmapLoader.loadBitmap(overlayBitmapUri);
try { try {
lastBitmap = future.get(); lastBitmap = future.get();
} catch (ExecutionException | InterruptedException e) { } catch (ExecutionException | InterruptedException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
return lastBitmap; return lastBitmap;

View File

@ -35,7 +35,7 @@ import java.util.Queue;
private final GlShaderProgram producingGlShaderProgram; private final GlShaderProgram producingGlShaderProgram;
private final GlShaderProgram consumingGlShaderProgram; private final GlShaderProgram consumingGlShaderProgram;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
@GuardedBy("this") @GuardedBy("this")
private final Queue<Pair<TextureInfo, Long>> availableFrames; private final Queue<Pair<TextureInfo, Long>> availableFrames;
@ -50,18 +50,18 @@ import java.util.Queue;
* as {@link OutputListener}. * as {@link OutputListener}.
* @param consumingGlShaderProgram The {@link GlShaderProgram} for which this listener will be set * @param consumingGlShaderProgram The {@link GlShaderProgram} for which this listener will be set
* as {@link InputListener}. * as {@link InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that is used for * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor} that is
* OpenGL calls. All calls to the producing/consuming {@link GlShaderProgram} will be executed * used for OpenGL calls. All calls to the producing/consuming {@link GlShaderProgram} will be
* by the {@link FrameProcessingTaskExecutor}. The caller is responsible for releasing the * executed by the {@link VideoFrameProcessingTaskExecutor}. The caller is responsible for
* {@link FrameProcessingTaskExecutor}. * releasing the {@link VideoFrameProcessingTaskExecutor}.
*/ */
public ChainingGlShaderProgramListener( public ChainingGlShaderProgramListener(
GlShaderProgram producingGlShaderProgram, GlShaderProgram producingGlShaderProgram,
GlShaderProgram consumingGlShaderProgram, GlShaderProgram consumingGlShaderProgram,
FrameProcessingTaskExecutor frameProcessingTaskExecutor) { VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.producingGlShaderProgram = producingGlShaderProgram; this.producingGlShaderProgram = producingGlShaderProgram;
this.consumingGlShaderProgram = consumingGlShaderProgram; this.consumingGlShaderProgram = consumingGlShaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
availableFrames = new ArrayDeque<>(); availableFrames = new ArrayDeque<>();
} }
@ -75,9 +75,10 @@ import java.util.Queue;
long presentationTimeUs = pendingFrame.second; long presentationTimeUs = pendingFrame.second;
if (presentationTimeUs == C.TIME_END_OF_SOURCE) { if (presentationTimeUs == C.TIME_END_OF_SOURCE) {
frameProcessingTaskExecutor.submit(consumingGlShaderProgram::signalEndOfCurrentInputStream); videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
} else { } else {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> () ->
consumingGlShaderProgram.queueInputFrame( consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ pendingFrame.first, presentationTimeUs)); /* inputTexture= */ pendingFrame.first, presentationTimeUs));
@ -86,7 +87,7 @@ import java.util.Queue;
@Override @Override
public void onInputFrameProcessed(TextureInfo inputTexture) { public void onInputFrameProcessed(TextureInfo inputTexture) {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> producingGlShaderProgram.releaseOutputFrame(inputTexture)); () -> producingGlShaderProgram.releaseOutputFrame(inputTexture));
} }
@ -94,14 +95,14 @@ import java.util.Queue;
public synchronized void onFlush() { public synchronized void onFlush() {
consumingGlShaderProgramInputCapacity = 0; consumingGlShaderProgramInputCapacity = 0;
availableFrames.clear(); availableFrames.clear();
frameProcessingTaskExecutor.submit(producingGlShaderProgram::flush); videoFrameProcessingTaskExecutor.submit(producingGlShaderProgram::flush);
} }
@Override @Override
public synchronized void onOutputFrameAvailable( public synchronized void onOutputFrameAvailable(
TextureInfo outputTexture, long presentationTimeUs) { TextureInfo outputTexture, long presentationTimeUs) {
if (consumingGlShaderProgramInputCapacity > 0) { if (consumingGlShaderProgramInputCapacity > 0) {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> () ->
consumingGlShaderProgram.queueInputFrame( consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ outputTexture, presentationTimeUs)); /* inputTexture= */ outputTexture, presentationTimeUs));
@ -116,7 +117,8 @@ import java.util.Queue;
if (!availableFrames.isEmpty()) { if (!availableFrames.isEmpty()) {
availableFrames.add(new Pair<>(TextureInfo.UNSET, C.TIME_END_OF_SOURCE)); availableFrames.add(new Pair<>(TextureInfo.UNSET, C.TIME_END_OF_SOURCE));
} else { } else {
frameProcessingTaskExecutor.submit(consumingGlShaderProgram::signalEndOfCurrentInputStream); videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
} }
} }
} }

View File

@ -18,7 +18,7 @@ package androidx.media3.effect;
import android.content.Context; import android.content.Context;
import androidx.annotation.WorkerThread; import androidx.annotation.WorkerThread;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
@ -45,7 +45,7 @@ public interface ColorLut extends GlEffect {
@Override @Override
@WorkerThread @WorkerThread
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr); return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr);
} }
} }

View File

@ -20,7 +20,7 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram; import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@ -41,10 +41,10 @@ import java.io.IOException;
* @param colorLut The {@link ColorLut} to apply to each frame in order. * @param colorLut The {@link ColorLut} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public ColorLutShaderProgram(Context context, ColorLut colorLut, boolean useHdr) public ColorLutShaderProgram(Context context, ColorLut colorLut, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
// TODO(b/246315245): Add HDR support. // TODO(b/246315245): Add HDR support.
checkArgument(!useHdr, "ColorLutShaderProgram does not support HDR colors."); checkArgument(!useHdr, "ColorLutShaderProgram does not support HDR colors.");
@ -53,7 +53,7 @@ import java.io.IOException;
try { try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y. // Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
@ -73,7 +73,8 @@ import java.io.IOException;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
@ -84,18 +85,18 @@ import java.io.IOException;
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
colorLut.release(); colorLut.release();
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
} }

View File

@ -19,7 +19,7 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument; import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
/** A {@link GlEffect} to control the contrast of video frames. */ /** A {@link GlEffect} to control the contrast of video frames. */
@ -42,7 +42,7 @@ public class Contrast implements GlEffect {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new ContrastShaderProgram(context, this, useHdr); return new ContrastShaderProgram(context, this, useHdr);
} }
} }

View File

@ -18,7 +18,7 @@ package androidx.media3.effect;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram; import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@ -38,10 +38,10 @@ import java.io.IOException;
* @param contrastEffect The {@link Contrast} to apply to each frame in order. * @param contrastEffect The {@link Contrast} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public ContrastShaderProgram(Context context, Contrast contrastEffect, boolean useHdr) public ContrastShaderProgram(Context context, Contrast contrastEffect, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
// Use 1.0001f to avoid division by zero issues. // Use 1.0001f to avoid division by zero issues.
float contrastFactor = (1 + contrastEffect.contrast) / (1.0001f - contrastEffect.contrast); float contrastFactor = (1 + contrastEffect.contrast) / (1.0001f - contrastEffect.contrast);
@ -49,7 +49,7 @@ import java.io.IOException;
try { try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y. // Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
@ -70,7 +70,8 @@ import java.io.IOException;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
@ -79,17 +80,17 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad. // The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
} }

View File

@ -37,9 +37,9 @@ import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.FrameInfo; import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
@ -54,14 +54,14 @@ import java.util.concurrent.Future;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** /**
* A {@link FrameProcessor} implementation that applies {@link GlEffect} instances using OpenGL on a * A {@link VideoFrameProcessor} implementation that applies {@link GlEffect} instances using OpenGL
* background thread. * on a background thread.
*/ */
@UnstableApi @UnstableApi
public final class GlEffectsFrameProcessor implements FrameProcessor { public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/** A factory for {@link GlEffectsFrameProcessor} instances. */ /** A factory for {@link DefaultVideoFrameProcessor} instances. */
public static class Factory implements FrameProcessor.Factory { public static class Factory implements VideoFrameProcessor.Factory {
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
@ -88,11 +88,11 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
* be configured with {@link GlUtil#EGL_CONFIG_ATTRIBUTES_RGBA_1010102}. Otherwise, the context * be configured with {@link GlUtil#EGL_CONFIG_ATTRIBUTES_RGBA_1010102}. Otherwise, the context
* will be configured with {@link GlUtil#EGL_CONFIG_ATTRIBUTES_RGBA_8888}. * will be configured with {@link GlUtil#EGL_CONFIG_ATTRIBUTES_RGBA_8888}.
* *
* <p>If invoking the {@code listener} on {@link GlEffectsFrameProcessor}'s internal thread is * <p>If invoking the {@code listener} on {@link DefaultVideoFrameProcessor}'s internal thread
* desired, pass a {@link MoreExecutors#directExecutor() direct listenerExecutor}. * is desired, pass a {@link MoreExecutors#directExecutor() direct listenerExecutor}.
*/ */
@Override @Override
public GlEffectsFrameProcessor create( public DefaultVideoFrameProcessor create(
Context context, Context context,
List<Effect> effects, List<Effect> effects,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
@ -102,7 +102,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
boolean releaseFramesAutomatically, boolean releaseFramesAutomatically,
Executor listenerExecutor, Executor listenerExecutor,
Listener listener) Listener listener)
throws FrameProcessingException { throws VideoFrameProcessingException {
// TODO(b/261188041) Add tests to verify the Listener is invoked on the given Executor. // TODO(b/261188041) Add tests to verify the Listener is invoked on the given Executor.
checkArgument(inputColorInfo.isValid()); checkArgument(inputColorInfo.isValid());
@ -126,7 +126,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
ExecutorService singleThreadExecutorService = Util.newSingleThreadExecutor(THREAD_NAME); ExecutorService singleThreadExecutorService = Util.newSingleThreadExecutor(THREAD_NAME);
Future<GlEffectsFrameProcessor> glFrameProcessorFuture = Future<DefaultVideoFrameProcessor> glFrameProcessorFuture =
singleThreadExecutorService.submit( singleThreadExecutorService.submit(
() -> () ->
createOpenGlObjectsAndFrameProcessor( createOpenGlObjectsAndFrameProcessor(
@ -144,10 +144,10 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
try { try {
return glFrameProcessorFuture.get(); return glFrameProcessorFuture.get();
} catch (ExecutionException e) { } catch (ExecutionException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
} }
@ -155,7 +155,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
/** /**
* Creates the OpenGL context, surfaces, textures, and frame buffers, initializes {@link * Creates the OpenGL context, surfaces, textures, and frame buffers, initializes {@link
* GlShaderProgram} instances corresponding to the {@link GlEffect} instances, and returns a new * GlShaderProgram} instances corresponding to the {@link GlEffect} instances, and returns a new
* {@code GlEffectsFrameProcessor}. * {@code DefaultVideoFrameProcessor}.
* *
* <p>All {@link Effect} instances must be {@link GlEffect} instances. * <p>All {@link Effect} instances must be {@link GlEffect} instances.
* *
@ -163,7 +163,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
* commands will be called on that thread. * commands will be called on that thread.
*/ */
@WorkerThread @WorkerThread
private static GlEffectsFrameProcessor createOpenGlObjectsAndFrameProcessor( private static DefaultVideoFrameProcessor createOpenGlObjectsAndFrameProcessor(
Context context, Context context,
List<Effect> effects, List<Effect> effects,
DebugViewProvider debugViewProvider, DebugViewProvider debugViewProvider,
@ -174,7 +174,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
ExecutorService singleThreadExecutorService, ExecutorService singleThreadExecutorService,
Executor executor, Executor executor,
Listener listener) Listener listener)
throws GlUtil.GlException, FrameProcessingException { throws GlUtil.GlException, VideoFrameProcessingException {
checkState(Thread.currentThread().getName().equals(THREAD_NAME)); checkState(Thread.currentThread().getName().equals(THREAD_NAME));
// TODO(b/237674316): Delay initialization of things requiring the colorInfo, to // TODO(b/237674316): Delay initialization of things requiring the colorInfo, to
@ -198,7 +198,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
GlUtil.destroyEglContext(eglDisplay, eglContext); GlUtil.destroyEglContext(eglDisplay, eglContext);
// On API<33, the system cannot display PQ content correctly regardless of whether BT2020 PQ // On API<33, the system cannot display PQ content correctly regardless of whether BT2020 PQ
// GL extension is supported. // GL extension is supported.
throw new FrameProcessingException("BT.2020 PQ OpenGL output isn't supported."); throw new VideoFrameProcessingException("BT.2020 PQ OpenGL output isn't supported.");
} }
} }
@ -215,16 +215,16 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
releaseFramesAutomatically, releaseFramesAutomatically,
executor, executor,
listener); listener);
FrameProcessingTaskExecutor frameProcessingTaskExecutor = VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
new FrameProcessingTaskExecutor(singleThreadExecutorService, listener); new VideoFrameProcessingTaskExecutor(singleThreadExecutorService, listener);
chainShaderProgramsWithListeners( chainShaderProgramsWithListeners(
shaderPrograms, frameProcessingTaskExecutor, listener, executor); shaderPrograms, videoFrameProcessingTaskExecutor, listener, executor);
return new GlEffectsFrameProcessor( return new DefaultVideoFrameProcessor(
eglDisplay, eglDisplay,
eglContext, eglContext,
isInputTextureExternal, isInputTextureExternal,
frameProcessingTaskExecutor, videoFrameProcessingTaskExecutor,
shaderPrograms, shaderPrograms,
releaseFramesAutomatically); releaseFramesAutomatically);
} }
@ -252,7 +252,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
boolean releaseFramesAutomatically, boolean releaseFramesAutomatically,
Executor executor, Executor executor,
Listener listener) Listener listener)
throws FrameProcessingException { throws VideoFrameProcessingException {
ImmutableList.Builder<GlShaderProgram> shaderProgramListBuilder = new ImmutableList.Builder<>(); ImmutableList.Builder<GlShaderProgram> shaderProgramListBuilder = new ImmutableList.Builder<>();
ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder = ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder =
new ImmutableList.Builder<>(); new ImmutableList.Builder<>();
@ -266,7 +266,8 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
.build(); .build();
for (int i = 0; i < effects.size(); i++) { for (int i = 0; i < effects.size(); i++) {
Effect effect = effects.get(i); Effect effect = effects.get(i);
checkArgument(effect instanceof GlEffect, "GlEffectsFrameProcessor only supports GlEffects"); checkArgument(
effect instanceof GlEffect, "DefaultVideoFrameProcessor only supports GlEffects");
GlEffect glEffect = (GlEffect) effect; GlEffect glEffect = (GlEffect) effect;
// The following logic may change the order of the RgbMatrix and GlMatrixTransformation // The following logic may change the order of the RgbMatrix and GlMatrixTransformation
// effects. This does not influence the output since RgbMatrix only changes the individual // effects. This does not influence the output since RgbMatrix only changes the individual
@ -333,18 +334,18 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
*/ */
private static void chainShaderProgramsWithListeners( private static void chainShaderProgramsWithListeners(
ImmutableList<GlShaderProgram> shaderPrograms, ImmutableList<GlShaderProgram> shaderPrograms,
FrameProcessingTaskExecutor frameProcessingTaskExecutor, VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
Listener frameProcessorListener, Listener videoFrameProcessorListener,
Executor frameProcessorListenerExecutor) { Executor videoFrameProcessorListenerExecutor) {
for (int i = 0; i < shaderPrograms.size() - 1; i++) { for (int i = 0; i < shaderPrograms.size() - 1; i++) {
GlShaderProgram producingGlShaderProgram = shaderPrograms.get(i); GlShaderProgram producingGlShaderProgram = shaderPrograms.get(i);
GlShaderProgram consumingGlShaderProgram = shaderPrograms.get(i + 1); GlShaderProgram consumingGlShaderProgram = shaderPrograms.get(i + 1);
ChainingGlShaderProgramListener chainingGlShaderProgramListener = ChainingGlShaderProgramListener chainingGlShaderProgramListener =
new ChainingGlShaderProgramListener( new ChainingGlShaderProgramListener(
producingGlShaderProgram, consumingGlShaderProgram, frameProcessingTaskExecutor); producingGlShaderProgram, consumingGlShaderProgram, videoFrameProcessingTaskExecutor);
producingGlShaderProgram.setOutputListener(chainingGlShaderProgramListener); producingGlShaderProgram.setOutputListener(chainingGlShaderProgramListener);
producingGlShaderProgram.setErrorListener( producingGlShaderProgram.setErrorListener(
frameProcessorListenerExecutor, frameProcessorListener::onFrameProcessingError); videoFrameProcessorListenerExecutor, videoFrameProcessorListener::onError);
consumingGlShaderProgram.setInputListener(chainingGlShaderProgramListener); consumingGlShaderProgram.setInputListener(chainingGlShaderProgramListener);
} }
} }
@ -354,7 +355,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
private final EGLDisplay eglDisplay; private final EGLDisplay eglDisplay;
private final EGLContext eglContext; private final EGLContext eglContext;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
private @MonotonicNonNull InternalTextureManager inputInternalTextureManager; private @MonotonicNonNull InternalTextureManager inputInternalTextureManager;
private @MonotonicNonNull ExternalTextureManager inputExternalTextureManager; private @MonotonicNonNull ExternalTextureManager inputExternalTextureManager;
private final boolean releaseFramesAutomatically; private final boolean releaseFramesAutomatically;
@ -370,18 +371,18 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo; private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded; private volatile boolean inputStreamEnded;
private GlEffectsFrameProcessor( private DefaultVideoFrameProcessor(
EGLDisplay eglDisplay, EGLDisplay eglDisplay,
EGLContext eglContext, EGLContext eglContext,
boolean isInputTextureExternal, boolean isInputTextureExternal,
FrameProcessingTaskExecutor frameProcessingTaskExecutor, VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
ImmutableList<GlShaderProgram> shaderPrograms, ImmutableList<GlShaderProgram> shaderPrograms,
boolean releaseFramesAutomatically) boolean releaseFramesAutomatically)
throws FrameProcessingException { throws VideoFrameProcessingException {
this.eglDisplay = eglDisplay; this.eglDisplay = eglDisplay;
this.eglContext = eglContext; this.eglContext = eglContext;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
this.releaseFramesAutomatically = releaseFramesAutomatically; this.releaseFramesAutomatically = releaseFramesAutomatically;
checkState(!shaderPrograms.isEmpty()); checkState(!shaderPrograms.isEmpty());
@ -393,11 +394,11 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
checkState(inputShaderProgram instanceof ExternalShaderProgram); checkState(inputShaderProgram instanceof ExternalShaderProgram);
inputExternalTextureManager = inputExternalTextureManager =
new ExternalTextureManager( new ExternalTextureManager(
(ExternalShaderProgram) inputShaderProgram, frameProcessingTaskExecutor); (ExternalShaderProgram) inputShaderProgram, videoFrameProcessingTaskExecutor);
inputShaderProgram.setInputListener(inputExternalTextureManager); inputShaderProgram.setInputListener(inputExternalTextureManager);
} else { } else {
inputInternalTextureManager = inputInternalTextureManager =
new InternalTextureManager(inputShaderProgram, frameProcessingTaskExecutor); new InternalTextureManager(inputShaderProgram, videoFrameProcessingTaskExecutor);
inputShaderProgram.setInputListener(inputInternalTextureManager); inputShaderProgram.setInputListener(inputInternalTextureManager);
} }
@ -406,10 +407,10 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
previousStreamOffsetUs = C.TIME_UNSET; previousStreamOffsetUs = C.TIME_UNSET;
} }
/** Returns the task executor that runs frame processing tasks. */ /** Returns the task executor that runs video frame processing tasks. */
@VisibleForTesting @VisibleForTesting
/* package */ FrameProcessingTaskExecutor getTaskExecutor() { /* package */ VideoFrameProcessingTaskExecutor getTaskExecutor() {
return frameProcessingTaskExecutor; return videoFrameProcessingTaskExecutor;
} }
/** /**
@ -421,7 +422,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
* call this method after instantiation to ensure that buffers are handled at full resolution. See * call this method after instantiation to ensure that buffers are handled at full resolution. See
* {@link SurfaceTexture#setDefaultBufferSize(int, int)} for more information. * {@link SurfaceTexture#setDefaultBufferSize(int, int)} for more information.
* *
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code * <p>This method should only be used for when the {@link VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}. * isInputTextureExternal} parameter is set to {@code true}.
* *
* @param width The default width for input buffers, in pixels. * @param width The default width for input buffers, in pixels.
@ -476,7 +477,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
checkState( checkState(
!releaseFramesAutomatically, !releaseFramesAutomatically,
"Calling this method is not allowed when releaseFramesAutomatically is enabled"); "Calling this method is not allowed when releaseFramesAutomatically is enabled");
frameProcessingTaskExecutor.submitWithHighPriority( videoFrameProcessingTaskExecutor.submitWithHighPriority(
() -> finalShaderProgramWrapper.releaseOutputFrame(releaseTimeNs)); () -> finalShaderProgramWrapper.releaseOutputFrame(releaseTimeNs));
} }
@ -485,20 +486,20 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
checkState(!inputStreamEnded); checkState(!inputStreamEnded);
inputStreamEnded = true; inputStreamEnded = true;
if (inputInternalTextureManager != null) { if (inputInternalTextureManager != null) {
frameProcessingTaskExecutor.submit(inputInternalTextureManager::signalEndOfInput); videoFrameProcessingTaskExecutor.submit(inputInternalTextureManager::signalEndOfInput);
} }
if (inputExternalTextureManager != null) { if (inputExternalTextureManager != null) {
frameProcessingTaskExecutor.submit(inputExternalTextureManager::signalEndOfInput); videoFrameProcessingTaskExecutor.submit(inputExternalTextureManager::signalEndOfInput);
} }
} }
@Override @Override
public void flush() { public void flush() {
try { try {
frameProcessingTaskExecutor.flush(); videoFrameProcessingTaskExecutor.flush();
CountDownLatch latch = new CountDownLatch(1); CountDownLatch latch = new CountDownLatch(1);
checkNotNull(inputExternalTextureManager).setOnFlushCompleteListener(latch::countDown); checkNotNull(inputExternalTextureManager).setOnFlushCompleteListener(latch::countDown);
frameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush); videoFrameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush);
latch.await(); latch.await();
inputExternalTextureManager.setOnFlushCompleteListener(null); inputExternalTextureManager.setOnFlushCompleteListener(null);
} catch (InterruptedException e) { } catch (InterruptedException e) {
@ -509,7 +510,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
@Override @Override
public void release() { public void release() {
try { try {
frameProcessingTaskExecutor.release( videoFrameProcessingTaskExecutor.release(
/* releaseTask= */ this::releaseShaderProgramsAndDestroyGlContext, RELEASE_WAIT_TIME_MS); /* releaseTask= */ this::releaseShaderProgramsAndDestroyGlContext, RELEASE_WAIT_TIME_MS);
} catch (InterruptedException unexpected) { } catch (InterruptedException unexpected) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
@ -548,7 +549,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
*/ */
@WorkerThread @WorkerThread
private void releaseShaderProgramsAndDestroyGlContext() private void releaseShaderProgramsAndDestroyGlContext()
throws GlUtil.GlException, FrameProcessingException { throws GlUtil.GlException, VideoFrameProcessingException {
for (int i = 0; i < allShaderPrograms.size(); i++) { for (int i = 0; i < allShaderPrograms.size(); i++) {
allShaderPrograms.get(i).release(); allShaderPrograms.get(i).release();
} }

View File

@ -23,8 +23,8 @@ import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread; import androidx.annotation.WorkerThread;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.FrameInfo; import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.FrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.effect.GlShaderProgram.InputListener; import androidx.media3.effect.GlShaderProgram.InputListener;
import java.util.Queue; import java.util.Queue;
@ -37,7 +37,7 @@ import java.util.concurrent.atomic.AtomicInteger;
*/ */
/* package */ final class ExternalTextureManager implements InputListener { /* package */ final class ExternalTextureManager implements InputListener {
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
private final ExternalShaderProgram externalShaderProgram; private final ExternalShaderProgram externalShaderProgram;
private final int externalTexId; private final int externalTexId;
private final Surface surface; private final Surface surface;
@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Nullable private volatile FrameInfo currentFrame; @Nullable private volatile FrameInfo currentFrame;
// TODO(b/238302341) Remove the use of after flush task, block the calling thread instead. // TODO(b/238302341) Remove the use of after flush task, block the calling thread instead.
@Nullable private volatile FrameProcessingTask onFlushCompleteTask; @Nullable private volatile VideoFrameProcessingTask onFlushCompleteTask;
private long previousStreamOffsetUs; private long previousStreamOffsetUs;
@ -70,21 +70,21 @@ import java.util.concurrent.atomic.AtomicInteger;
* *
* @param externalShaderProgram The {@link ExternalShaderProgram} for which this {@code * @param externalShaderProgram The {@link ExternalShaderProgram} for which this {@code
* ExternalTextureManager} will be set as the {@link InputListener}. * ExternalTextureManager} will be set as the {@link InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor}. * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor}.
* @throws FrameProcessingException If a problem occurs while creating the external texture. * @throws VideoFrameProcessingException If a problem occurs while creating the external texture.
*/ */
// The onFrameAvailableListener will not be invoked until the constructor returns. // The onFrameAvailableListener will not be invoked until the constructor returns.
@SuppressWarnings("nullness:method.invocation.invalid") @SuppressWarnings("nullness:method.invocation.invalid")
public ExternalTextureManager( public ExternalTextureManager(
ExternalShaderProgram externalShaderProgram, ExternalShaderProgram externalShaderProgram,
FrameProcessingTaskExecutor frameProcessingTaskExecutor) VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor)
throws FrameProcessingException { throws VideoFrameProcessingException {
this.externalShaderProgram = externalShaderProgram; this.externalShaderProgram = externalShaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
try { try {
externalTexId = GlUtil.createExternalTexture(); externalTexId = GlUtil.createExternalTexture();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
surfaceTexture = new SurfaceTexture(externalTexId); surfaceTexture = new SurfaceTexture(externalTexId);
textureTransformMatrix = new float[16]; textureTransformMatrix = new float[16];
@ -93,7 +93,7 @@ import java.util.concurrent.atomic.AtomicInteger;
previousStreamOffsetUs = C.TIME_UNSET; previousStreamOffsetUs = C.TIME_UNSET;
surfaceTexture.setOnFrameAvailableListener( surfaceTexture.setOnFrameAvailableListener(
unused -> unused ->
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
if (numberOfFramesToDropOnBecomingAvailable > 0) { if (numberOfFramesToDropOnBecomingAvailable > 0) {
numberOfFramesToDropOnBecomingAvailable--; numberOfFramesToDropOnBecomingAvailable--;
@ -119,7 +119,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override @Override
public void onReadyToAcceptInputFrame() { public void onReadyToAcceptInputFrame() {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
externalShaderProgramInputCapacity.incrementAndGet(); externalShaderProgramInputCapacity.incrementAndGet();
maybeQueueFrameToExternalShaderProgram(); maybeQueueFrameToExternalShaderProgram();
@ -128,7 +128,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override @Override
public void onInputFrameProcessed(TextureInfo inputTexture) { public void onInputFrameProcessed(TextureInfo inputTexture) {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
currentFrame = null; currentFrame = null;
maybeQueueFrameToExternalShaderProgram(); maybeQueueFrameToExternalShaderProgram();
@ -136,13 +136,13 @@ import java.util.concurrent.atomic.AtomicInteger;
} }
/** Sets the task to run on completing flushing, or {@code null} to clear any task. */ /** Sets the task to run on completing flushing, or {@code null} to clear any task. */
public void setOnFlushCompleteListener(@Nullable FrameProcessingTask task) { public void setOnFlushCompleteListener(@Nullable VideoFrameProcessingTask task) {
onFlushCompleteTask = task; onFlushCompleteTask = task;
} }
@Override @Override
public void onFlush() { public void onFlush() {
frameProcessingTaskExecutor.submit(this::flush); videoFrameProcessingTaskExecutor.submit(this::flush);
} }
/** /**
@ -169,10 +169,10 @@ import java.util.concurrent.atomic.AtomicInteger;
/** /**
* Signals the end of the input. * Signals the end of the input.
* *
* @see FrameProcessor#signalEndOfInput() * @see VideoFrameProcessor#signalEndOfInput()
*/ */
public void signalEndOfInput() { public void signalEndOfInput() {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
inputStreamEnded = true; inputStreamEnded = true;
if (pendingFrames.isEmpty() && currentFrame == null) { if (pendingFrames.isEmpty() && currentFrame == null) {
@ -204,7 +204,7 @@ import java.util.concurrent.atomic.AtomicInteger;
if (onFlushCompleteTask == null || numberOfFramesToDropOnBecomingAvailable > 0) { if (onFlushCompleteTask == null || numberOfFramesToDropOnBecomingAvailable > 0) {
return; return;
} }
frameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask); videoFrameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
} }
@WorkerThread @WorkerThread

View File

@ -36,9 +36,9 @@ import androidx.annotation.WorkerThread;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Log; import androidx.media3.common.util.Log;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@ -59,7 +59,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* the frames to the dimensions specified by the provided {@link SurfaceInfo}. * the frames to the dimensions specified by the provided {@link SurfaceInfo}.
* *
* <p>This wrapper is used for the final {@link GlShaderProgram} instance in the chain of {@link * <p>This wrapper is used for the final {@link GlShaderProgram} instance in the chain of {@link
* GlShaderProgram} instances used by {@link FrameProcessor}. * GlShaderProgram} instances used by {@link VideoFrameProcessor}.
*/ */
/* package */ final class FinalMatrixShaderProgramWrapper implements ExternalShaderProgram { /* package */ final class FinalMatrixShaderProgramWrapper implements ExternalShaderProgram {
@ -76,8 +76,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final ColorInfo inputColorInfo; private final ColorInfo inputColorInfo;
private final ColorInfo outputColorInfo; private final ColorInfo outputColorInfo;
private final boolean releaseFramesAutomatically; private final boolean releaseFramesAutomatically;
private final Executor frameProcessorListenerExecutor; private final Executor videoFrameProcessorListenerExecutor;
private final FrameProcessor.Listener frameProcessorListener; private final VideoFrameProcessor.Listener videoFrameProcessorListener;
private final float[] textureTransformMatrix; private final float[] textureTransformMatrix;
private final Queue<Long> streamOffsetUsQueue; private final Queue<Long> streamOffsetUsQueue;
private final Queue<Pair<TextureInfo, Long>> availableFrames; private final Queue<Pair<TextureInfo, Long>> availableFrames;
@ -112,8 +112,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
boolean sampleFromInputTexture, boolean sampleFromInputTexture,
boolean isInputTextureExternal, boolean isInputTextureExternal,
boolean releaseFramesAutomatically, boolean releaseFramesAutomatically,
Executor frameProcessorListenerExecutor, Executor videoFrameProcessorListenerExecutor,
FrameProcessor.Listener frameProcessorListener) { VideoFrameProcessor.Listener videoFrameProcessorListener) {
this.context = context; this.context = context;
this.matrixTransformations = matrixTransformations; this.matrixTransformations = matrixTransformations;
this.rgbMatrices = rgbMatrices; this.rgbMatrices = rgbMatrices;
@ -125,8 +125,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.inputColorInfo = inputColorInfo; this.inputColorInfo = inputColorInfo;
this.outputColorInfo = outputColorInfo; this.outputColorInfo = outputColorInfo;
this.releaseFramesAutomatically = releaseFramesAutomatically; this.releaseFramesAutomatically = releaseFramesAutomatically;
this.frameProcessorListenerExecutor = frameProcessorListenerExecutor; this.videoFrameProcessorListenerExecutor = videoFrameProcessorListenerExecutor;
this.frameProcessorListener = frameProcessorListener; this.videoFrameProcessorListener = videoFrameProcessorListener;
textureTransformMatrix = GlUtil.create4x4IdentityMatrix(); textureTransformMatrix = GlUtil.create4x4IdentityMatrix();
streamOffsetUsQueue = new ConcurrentLinkedQueue<>(); streamOffsetUsQueue = new ConcurrentLinkedQueue<>();
@ -142,13 +142,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override @Override
public void setOutputListener(OutputListener outputListener) { public void setOutputListener(OutputListener outputListener) {
// The FrameProcessor.Listener passed to the constructor is used for output-related events. // The VideoFrameProcessor.Listener passed to the constructor is used for output-related events.
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@Override @Override
public void setErrorListener(Executor executor, ErrorListener errorListener) { public void setErrorListener(Executor executor, ErrorListener errorListener) {
// The FrameProcessor.Listener passed to the constructor is used for errors. // The VideoFrameProcessor.Listener passed to the constructor is used for errors.
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@ -157,8 +157,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
long streamOffsetUs = long streamOffsetUs =
checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified."); checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified.");
long offsetPresentationTimeUs = presentationTimeUs + streamOffsetUs; long offsetPresentationTimeUs = presentationTimeUs + streamOffsetUs;
frameProcessorListenerExecutor.execute( videoFrameProcessorListenerExecutor.execute(
() -> frameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs)); () -> videoFrameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs));
if (releaseFramesAutomatically) { if (releaseFramesAutomatically) {
renderFrameToSurfaces( renderFrameToSurfaces(
inputTexture, presentationTimeUs, /* releaseTimeNs= */ offsetPresentationTimeUs * 1000); inputTexture, presentationTimeUs, /* releaseTimeNs= */ offsetPresentationTimeUs * 1000);
@ -189,7 +189,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end."); checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end.");
streamOffsetUsQueue.remove(); streamOffsetUsQueue.remove();
if (streamOffsetUsQueue.isEmpty()) { if (streamOffsetUsQueue.isEmpty()) {
frameProcessorListenerExecutor.execute(frameProcessorListener::onFrameProcessingEnded); videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
} }
} }
@ -206,14 +206,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override @Override
@WorkerThread @WorkerThread
public synchronized void release() throws FrameProcessingException { public synchronized void release() throws VideoFrameProcessingException {
if (matrixShaderProgram != null) { if (matrixShaderProgram != null) {
matrixShaderProgram.release(); matrixShaderProgram.release();
} }
try { try {
GlUtil.destroyEglSurface(eglDisplay, outputEglSurface); GlUtil.destroyEglSurface(eglDisplay, outputEglSurface);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
@ -247,7 +247,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** /**
* Sets the output {@link SurfaceInfo}. * Sets the output {@link SurfaceInfo}.
* *
* @see FrameProcessor#setOutputSurfaceInfo(SurfaceInfo) * @see VideoFrameProcessor#setOutputSurfaceInfo(SurfaceInfo)
*/ */
public synchronized void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) { public synchronized void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
if (!Util.areEqual(this.outputSurfaceInfo, outputSurfaceInfo)) { if (!Util.areEqual(this.outputSurfaceInfo, outputSurfaceInfo)) {
@ -257,9 +257,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
try { try {
GlUtil.destroyEglSurface(eglDisplay, outputEglSurface); GlUtil.destroyEglSurface(eglDisplay, outputEglSurface);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
frameProcessorListenerExecutor.execute( videoFrameProcessorListenerExecutor.execute(
() -> () -> videoFrameProcessorListener.onError(VideoFrameProcessingException.from(e)));
frameProcessorListener.onFrameProcessingError(FrameProcessingException.from(e)));
} }
this.outputEglSurface = null; this.outputEglSurface = null;
} }
@ -277,11 +276,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
TextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) { TextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) {
try { try {
maybeRenderFrameToOutputSurface(inputTexture, presentationTimeUs, releaseTimeNs); maybeRenderFrameToOutputSurface(inputTexture, presentationTimeUs, releaseTimeNs);
} catch (FrameProcessingException | GlUtil.GlException e) { } catch (VideoFrameProcessingException | GlUtil.GlException e) {
frameProcessorListenerExecutor.execute( videoFrameProcessorListenerExecutor.execute(
() -> () ->
frameProcessorListener.onFrameProcessingError( videoFrameProcessorListener.onError(
FrameProcessingException.from(e, presentationTimeUs))); VideoFrameProcessingException.from(e, presentationTimeUs)));
} }
maybeRenderFrameToDebugSurface(inputTexture, presentationTimeUs); maybeRenderFrameToDebugSurface(inputTexture, presentationTimeUs);
inputListener.onInputFrameProcessed(inputTexture); inputListener.onInputFrameProcessed(inputTexture);
@ -289,8 +288,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private synchronized void maybeRenderFrameToOutputSurface( private synchronized void maybeRenderFrameToOutputSurface(
TextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) TextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs)
throws FrameProcessingException, GlUtil.GlException { throws VideoFrameProcessingException, GlUtil.GlException {
if (releaseTimeNs == FrameProcessor.DROP_OUTPUT_FRAME if (releaseTimeNs == VideoFrameProcessor.DROP_OUTPUT_FRAME
|| !ensureConfigured(inputTexture.width, inputTexture.height)) { || !ensureConfigured(inputTexture.width, inputTexture.height)) {
return; // Drop frames when requested, or there is no output surface. return; // Drop frames when requested, or there is no output surface.
} }
@ -311,7 +310,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
EGLExt.eglPresentationTimeANDROID( EGLExt.eglPresentationTimeANDROID(
eglDisplay, eglDisplay,
outputEglSurface, outputEglSurface,
releaseTimeNs == FrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY releaseTimeNs == VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY
? System.nanoTime() ? System.nanoTime()
: releaseTimeNs); : releaseTimeNs);
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface); EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
@ -321,7 +320,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
expression = {"outputSurfaceInfo", "outputEglSurface", "matrixShaderProgram"}, expression = {"outputSurfaceInfo", "outputEglSurface", "matrixShaderProgram"},
result = true) result = true)
private synchronized boolean ensureConfigured(int inputWidth, int inputHeight) private synchronized boolean ensureConfigured(int inputWidth, int inputHeight)
throws FrameProcessingException, GlUtil.GlException { throws VideoFrameProcessingException, GlUtil.GlException {
if (this.inputWidth != inputWidth if (this.inputWidth != inputWidth
|| this.inputHeight != inputHeight || this.inputHeight != inputHeight
@ -333,9 +332,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (!Util.areEqual( if (!Util.areEqual(
this.outputSizeBeforeSurfaceTransformation, outputSizeBeforeSurfaceTransformation)) { this.outputSizeBeforeSurfaceTransformation, outputSizeBeforeSurfaceTransformation)) {
this.outputSizeBeforeSurfaceTransformation = outputSizeBeforeSurfaceTransformation; this.outputSizeBeforeSurfaceTransformation = outputSizeBeforeSurfaceTransformation;
frameProcessorListenerExecutor.execute( videoFrameProcessorListenerExecutor.execute(
() -> () ->
frameProcessorListener.onOutputSizeChanged( videoFrameProcessorListener.onOutputSizeChanged(
outputSizeBeforeSurfaceTransformation.getWidth(), outputSizeBeforeSurfaceTransformation.getWidth(),
outputSizeBeforeSurfaceTransformation.getHeight())); outputSizeBeforeSurfaceTransformation.getHeight()));
} }
@ -389,7 +388,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
private MatrixShaderProgram createMatrixShaderProgramForOutputSurface( private MatrixShaderProgram createMatrixShaderProgramForOutputSurface(
SurfaceInfo outputSurfaceInfo) throws FrameProcessingException { SurfaceInfo outputSurfaceInfo) throws VideoFrameProcessingException {
ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder = ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder =
new ImmutableList.Builder<GlMatrixTransformation>().addAll(matrixTransformations); new ImmutableList.Builder<GlMatrixTransformation>().addAll(matrixTransformations);
if (outputSurfaceInfo.orientationDegrees != 0) { if (outputSurfaceInfo.orientationDegrees != 0) {
@ -453,7 +452,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
matrixShaderProgram.drawFrame(inputTexture.texId, presentationTimeUs); matrixShaderProgram.drawFrame(inputTexture.texId, presentationTimeUs);
matrixShaderProgram.setOutputColorTransfer(configuredColorTransfer); matrixShaderProgram.setOutputColorTransfer(configuredColorTransfer);
}); });
} catch (FrameProcessingException | GlUtil.GlException e) { } catch (VideoFrameProcessingException | GlUtil.GlException e) {
Log.d(TAG, "Error rendering to debug preview", e); Log.d(TAG, "Error rendering to debug preview", e);
} }
} }
@ -502,8 +501,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* otherwise. * otherwise.
*/ */
@WorkerThread @WorkerThread
public synchronized void maybeRenderToSurfaceView(FrameProcessingTask renderingTask) public synchronized void maybeRenderToSurfaceView(VideoFrameProcessingTask renderingTask)
throws GlUtil.GlException, FrameProcessingException { throws GlUtil.GlException, VideoFrameProcessingException {
if (surface == null) { if (surface == null) {
return; return;
} }

View File

@ -19,14 +19,14 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import androidx.annotation.IntRange; import androidx.annotation.IntRange;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
/** /**
* Caches the input frames. * Caches the input frames.
* *
* <p>Example usage: cache the processed frames when presenting them on screen, to accommodate for * <p>Example usage: cache the processed frames when presenting them on screen, to accommodate for
* the possible fluctuation in frame processing time between frames. * the possible fluctuation in video frame processing time between frames.
*/ */
@UnstableApi @UnstableApi
public final class FrameCache implements GlEffect { public final class FrameCache implements GlEffect {
@ -51,7 +51,7 @@ public final class FrameCache implements GlEffect {
@Override @Override
public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new FrameCacheShaderProgram(context, capacity, useHdr); return new FrameCacheShaderProgram(context, capacity, useHdr);
} }
} }

View File

@ -19,7 +19,7 @@ import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram; import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
@ -54,7 +54,7 @@ import java.util.concurrent.Executor;
/** Creates a new instance. */ /** Creates a new instance. */
public FrameCacheShaderProgram(Context context, int capacity, boolean useHdr) public FrameCacheShaderProgram(Context context, int capacity, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
freeOutputTextures = new ArrayDeque<>(); freeOutputTextures = new ArrayDeque<>();
inUseOutputTextures = new ArrayDeque<>(); inUseOutputTextures = new ArrayDeque<>();
try { try {
@ -64,7 +64,7 @@ import java.util.concurrent.Executor;
VERTEX_SHADER_TRANSFORMATION_ES2_PATH, VERTEX_SHADER_TRANSFORMATION_ES2_PATH,
FRAGMENT_SHADER_TRANSFORMATION_ES2_PATH); FRAGMENT_SHADER_TRANSFORMATION_ES2_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw FrameProcessingException.from(e); throw VideoFrameProcessingException.from(e);
} }
this.capacity = capacity; this.capacity = capacity;
this.useHdr = useHdr; this.useHdr = useHdr;
@ -80,7 +80,7 @@ import java.util.concurrent.Executor;
inputListener = new InputListener() {}; inputListener = new InputListener() {};
outputListener = new OutputListener() {}; outputListener = new OutputListener() {};
errorListener = frameProcessingException -> {}; errorListener = videoFrameProcessingException -> {};
errorListenerExecutor = MoreExecutors.directExecutor(); errorListenerExecutor = MoreExecutors.directExecutor();
} }
@ -129,7 +129,7 @@ import java.util.concurrent.Executor;
outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs); outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
} catch (GlUtil.GlException | NoSuchElementException e) { } catch (GlUtil.GlException | NoSuchElementException e) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(FrameProcessingException.from(e))); () -> errorListener.onError(VideoFrameProcessingException.from(e)));
} }
} }
@ -167,11 +167,11 @@ import java.util.concurrent.Executor;
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
try { try {
deleteAllOutputTextures(); deleteAllOutputTextures();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }

View File

@ -17,7 +17,7 @@ package androidx.media3.effect;
import android.content.Context; import android.content.Context;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
/** /**
@ -36,10 +36,11 @@ public interface GlEffect extends Effect {
* @param context A {@link Context}. * @param context A {@link Context}.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If an error occurs while creating the {@link GlShaderProgram}. * @throws VideoFrameProcessingException If an error occurs while creating the {@link
* GlShaderProgram}.
*/ */
GlShaderProgram toGlShaderProgram(Context context, boolean useHdr) GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException; throws VideoFrameProcessingException;
/** /**
* Returns whether a {@link GlEffect} applies no change at every timestamp. * Returns whether a {@link GlEffect} applies no change at every timestamp.

View File

@ -17,7 +17,7 @@ package androidx.media3.effect;
import android.content.Context; import android.content.Context;
import android.opengl.Matrix; import android.opengl.Matrix;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
@ -54,7 +54,7 @@ public interface GlMatrixTransformation extends GlEffect {
@Override @Override
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return MatrixShaderProgram.create( return MatrixShaderProgram.create(
context, context,
/* matrixTransformations= */ ImmutableList.of(this), /* matrixTransformations= */ ImmutableList.of(this),

View File

@ -15,7 +15,7 @@
*/ */
package androidx.media3.effect; package androidx.media3.effect;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
@ -47,7 +47,7 @@ import java.util.concurrent.Executor;
public interface GlShaderProgram { public interface GlShaderProgram {
/** /**
* Listener for input-related frame processing events. * Listener for input-related video frame processing events.
* *
* <p>This listener can be called from any thread. * <p>This listener can be called from any thread.
*/ */
@ -81,7 +81,7 @@ public interface GlShaderProgram {
} }
/** /**
* Listener for output-related frame processing events. * Listener for output-related video frame processing events.
* *
* <p>This listener can be called from any thread. * <p>This listener can be called from any thread.
*/ */
@ -108,26 +108,26 @@ public interface GlShaderProgram {
} }
/** /**
* Listener for frame processing errors. * Listener for video frame processing errors.
* *
* <p>This listener can be called from any thread. * <p>This listener can be called from any thread.
*/ */
interface ErrorListener { interface ErrorListener {
/** /**
* Called when an exception occurs during asynchronous frame processing. * Called when an exception occurs during asynchronous video frame processing.
* *
* <p>If an error occurred, consuming and producing further frames will not work as expected and * <p>If an error occurred, consuming and producing further frames will not work as expected and
* the {@link GlShaderProgram} should be released. * the {@link GlShaderProgram} should be released.
*/ */
void onFrameProcessingError(FrameProcessingException e); void onError(VideoFrameProcessingException e);
} }
/** /**
* Sets the {@link InputListener}. * Sets the {@link InputListener}.
* *
* <p>The {@link InputListener} should be invoked on the thread that owns the parent OpenGL * <p>The {@link InputListener} should be invoked on the thread that owns the parent OpenGL
* context. For example, {@link GlEffectsFrameProcessor} invokes the {@link InputListener} methods * context. For example, {@link DefaultVideoFrameProcessor} invokes the {@link InputListener}
* on its internal thread. * methods on its internal thread.
*/ */
void setInputListener(InputListener inputListener); void setInputListener(InputListener inputListener);
@ -135,7 +135,7 @@ public interface GlShaderProgram {
* Sets the {@link OutputListener}. * Sets the {@link OutputListener}.
* *
* <p>The {@link OutputListener} should be invoked on the thread that owns the parent OpenGL * <p>The {@link OutputListener} should be invoked on the thread that owns the parent OpenGL
* context. For example, {@link GlEffectsFrameProcessor} invokes the {@link OutputListener} * context. For example, {@link DefaultVideoFrameProcessor} invokes the {@link OutputListener}
* methods on its internal thread. * methods on its internal thread.
*/ */
void setOutputListener(OutputListener outputListener); void setOutputListener(OutputListener outputListener);
@ -190,7 +190,7 @@ public interface GlShaderProgram {
/** /**
* Releases all resources. * Releases all resources.
* *
* @throws FrameProcessingException If an error occurs while releasing resources. * @throws VideoFrameProcessingException If an error occurs while releasing resources.
*/ */
void release() throws FrameProcessingException; void release() throws VideoFrameProcessingException;
} }

View File

@ -19,7 +19,7 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument; import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.errorprone.annotations.CanIgnoreReturnValue;
@ -114,7 +114,7 @@ public class HslAdjustment implements GlEffect {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new HslShaderProgram(context, /* hslAdjustment= */ this, useHdr); return new HslShaderProgram(context, /* hslAdjustment= */ this, useHdr);
} }
} }

View File

@ -20,7 +20,7 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram; import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@ -40,10 +40,10 @@ import java.io.IOException;
* @param hslAdjustment The {@link HslAdjustment} to apply to each frame in order. * @param hslAdjustment The {@link HslAdjustment} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public HslShaderProgram(Context context, HslAdjustment hslAdjustment, boolean useHdr) public HslShaderProgram(Context context, HslAdjustment hslAdjustment, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
// TODO(b/241241680): Check if HDR <-> HSL works the same or not. // TODO(b/241241680): Check if HDR <-> HSL works the same or not.
checkArgument(!useHdr, "HDR is not yet supported."); checkArgument(!useHdr, "HDR is not yet supported.");
@ -51,7 +51,7 @@ import java.io.IOException;
try { try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH); glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y. // Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
@ -78,7 +78,8 @@ import java.io.IOException;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0); glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
@ -87,7 +88,7 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad. // The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
} }

View File

@ -23,22 +23,23 @@ import android.opengl.GLES20;
import android.opengl.GLUtils; import android.opengl.GLUtils;
import androidx.annotation.WorkerThread; import androidx.annotation.WorkerThread;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.FrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import java.util.Queue; import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.LinkedBlockingQueue;
/** /**
* Forwards a frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for consumption. * Forwards a video frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for
* consumption.
* *
* <p>Methods in this class can be called from any thread. * <p>Methods in this class can be called from any thread.
*/ */
@UnstableApi @UnstableApi
/* package */ final class InternalTextureManager implements GlShaderProgram.InputListener { /* package */ final class InternalTextureManager implements GlShaderProgram.InputListener {
private final GlShaderProgram shaderProgram; private final GlShaderProgram shaderProgram;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor; private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// The queue holds all bitmaps with one or more frames pending to be sent downstream. // The queue holds all bitmaps with one or more frames pending to be sent downstream.
private final Queue<BitmapFrameSequenceInfo> pendingBitmaps; private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
@ -53,13 +54,14 @@ import java.util.concurrent.LinkedBlockingQueue;
* *
* @param shaderProgram The {@link GlShaderProgram} for which this {@code InternalTextureManager} * @param shaderProgram The {@link GlShaderProgram} for which this {@code InternalTextureManager}
* will be set as the {@link GlShaderProgram.InputListener}. * will be set as the {@link GlShaderProgram.InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that the methods of * @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor} that the
* this class run on. * methods of this class run on.
*/ */
public InternalTextureManager( public InternalTextureManager(
GlShaderProgram shaderProgram, FrameProcessingTaskExecutor frameProcessingTaskExecutor) { GlShaderProgram shaderProgram,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.shaderProgram = shaderProgram; this.shaderProgram = shaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor; this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
pendingBitmaps = new LinkedBlockingQueue<>(); pendingBitmaps = new LinkedBlockingQueue<>();
} }
@ -69,7 +71,7 @@ import java.util.concurrent.LinkedBlockingQueue;
// program and change to only allocate one texId at a time. A change to the // program and change to only allocate one texId at a time. A change to the
// onInputFrameProcessed() method signature to include presentationTimeUs will probably be // onInputFrameProcessed() method signature to include presentationTimeUs will probably be
// needed to do this. // needed to do this.
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
downstreamShaderProgramCapacity++; downstreamShaderProgramCapacity++;
maybeQueueToShaderProgram(); maybeQueueToShaderProgram();
@ -79,21 +81,21 @@ import java.util.concurrent.LinkedBlockingQueue;
/** /**
* Provides an input {@link Bitmap} to put into the video frames. * Provides an input {@link Bitmap} to put into the video frames.
* *
* @see FrameProcessor#queueInputBitmap * @see VideoFrameProcessor#queueInputBitmap
*/ */
public void queueInputBitmap( public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, float frameRate, boolean useHdr) { Bitmap inputBitmap, long durationUs, float frameRate, boolean useHdr) {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> setupBitmap(inputBitmap, durationUs, frameRate, useHdr)); () -> setupBitmap(inputBitmap, durationUs, frameRate, useHdr));
} }
/** /**
* Signals the end of the input. * Signals the end of the input.
* *
* @see FrameProcessor#signalEndOfInput() * @see VideoFrameProcessor#signalEndOfInput()
*/ */
public void signalEndOfInput() { public void signalEndOfInput() {
frameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
inputEnded = true; inputEnded = true;
maybeSignalEndOfOutput(); maybeSignalEndOfOutput();
@ -102,7 +104,7 @@ import java.util.concurrent.LinkedBlockingQueue;
@WorkerThread @WorkerThread
private void setupBitmap(Bitmap bitmap, long durationUs, float frameRate, boolean useHdr) private void setupBitmap(Bitmap bitmap, long durationUs, float frameRate, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
if (inputEnded) { if (inputEnded) {
return; return;
@ -116,7 +118,7 @@ import java.util.concurrent.LinkedBlockingQueue;
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0); GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
GlUtil.checkGlError(); GlUtil.checkGlError();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw FrameProcessingException.from(e); throw VideoFrameProcessingException.from(e);
} }
TextureInfo textureInfo = TextureInfo textureInfo =
new TextureInfo( new TextureInfo(

View File

@ -24,7 +24,7 @@ import android.opengl.Matrix;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram; import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@ -143,15 +143,15 @@ import java.util.List;
* @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be * @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be
* empty to apply no color transformations. * empty to apply no color transformations.
* @param useHdr Whether input and output colors are HDR. * @param useHdr Whether input and output colors are HDR.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL * @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* operation fails or is unsupported. * OpenGL operation fails or is unsupported.
*/ */
public static MatrixShaderProgram create( public static MatrixShaderProgram create(
Context context, Context context,
List<GlMatrixTransformation> matrixTransformations, List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
boolean useHdr) boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
GlProgram glProgram = GlProgram glProgram =
createGlProgram( createGlProgram(
context, VERTEX_SHADER_TRANSFORMATION_PATH, FRAGMENT_SHADER_TRANSFORMATION_PATH); context, VERTEX_SHADER_TRANSFORMATION_PATH, FRAGMENT_SHADER_TRANSFORMATION_PATH);
@ -185,8 +185,8 @@ import java.util.List;
* @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}. * @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}.
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain * If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not. * ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL * @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* operation fails or is unsupported. * OpenGL operation fails or is unsupported.
*/ */
public static MatrixShaderProgram createWithInternalSampler( public static MatrixShaderProgram createWithInternalSampler(
Context context, Context context,
@ -194,7 +194,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo) ColorInfo outputColorInfo)
throws FrameProcessingException { throws VideoFrameProcessingException {
checkState( checkState(
!ColorInfo.isTransferHdr(inputColorInfo), !ColorInfo.isTransferHdr(inputColorInfo),
"MatrixShaderProgram doesn't support HDR internal sampler input yet."); "MatrixShaderProgram doesn't support HDR internal sampler input yet.");
@ -229,8 +229,8 @@ import java.util.List;
* @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}. * @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}.
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain * If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not. * ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL * @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* operation fails or is unsupported. * OpenGL operation fails or is unsupported.
*/ */
public static MatrixShaderProgram createWithExternalSampler( public static MatrixShaderProgram createWithExternalSampler(
Context context, Context context,
@ -238,7 +238,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo) ColorInfo outputColorInfo)
throws FrameProcessingException { throws VideoFrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo); boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
String vertexShaderFilePath = String vertexShaderFilePath =
isInputTransferHdr isInputTransferHdr
@ -272,15 +272,15 @@ import java.util.List;
* @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be * @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be
* empty to apply no color transformations. * empty to apply no color transformations.
* @param outputColorInfo The electrical (non-linear) {@link ColorInfo} describing output colors. * @param outputColorInfo The electrical (non-linear) {@link ColorInfo} describing output colors.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL * @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* operation fails or is unsupported. * OpenGL operation fails or is unsupported.
*/ */
public static MatrixShaderProgram createApplyingOetf( public static MatrixShaderProgram createApplyingOetf(
Context context, Context context,
List<GlMatrixTransformation> matrixTransformations, List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
ColorInfo outputColorInfo) ColorInfo outputColorInfo)
throws FrameProcessingException { throws VideoFrameProcessingException {
boolean outputIsHdr = ColorInfo.isTransferHdr(outputColorInfo); boolean outputIsHdr = ColorInfo.isTransferHdr(outputColorInfo);
String vertexShaderFilePath = String vertexShaderFilePath =
outputIsHdr ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH : VERTEX_SHADER_TRANSFORMATION_PATH; outputIsHdr ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH : VERTEX_SHADER_TRANSFORMATION_PATH;
@ -317,7 +317,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices, List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo) ColorInfo outputColorInfo)
throws FrameProcessingException { throws VideoFrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo); boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
@C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer; @C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer;
if (isInputTransferHdr) { if (isInputTransferHdr) {
@ -325,7 +325,7 @@ import java.util.List;
// In HDR editing mode the decoder output is sampled in YUV. // In HDR editing mode the decoder output is sampled in YUV.
if (!GlUtil.isYuvTargetExtensionSupported()) { if (!GlUtil.isYuvTargetExtensionSupported()) {
throw new FrameProcessingException( throw new VideoFrameProcessingException(
"The EXT_YUV_target extension is required for HDR editing input."); "The EXT_YUV_target extension is required for HDR editing input.");
} }
glProgram.setFloatsUniform( glProgram.setFloatsUniform(
@ -398,13 +398,13 @@ import java.util.List;
private static GlProgram createGlProgram( private static GlProgram createGlProgram(
Context context, String vertexShaderFilePath, String fragmentShaderFilePath) Context context, String vertexShaderFilePath, String fragmentShaderFilePath)
throws FrameProcessingException { throws VideoFrameProcessingException {
GlProgram glProgram; GlProgram glProgram;
try { try {
glProgram = new GlProgram(context, vertexShaderFilePath, fragmentShaderFilePath); glProgram = new GlProgram(context, vertexShaderFilePath, fragmentShaderFilePath);
} catch (IOException | GlUtil.GlException e) { } catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
float[] identityMatrix = GlUtil.create4x4IdentityMatrix(); float[] identityMatrix = GlUtil.create4x4IdentityMatrix();
@ -423,7 +423,8 @@ import java.util.List;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
updateCompositeRgbaMatrixArray(presentationTimeUs); updateCompositeRgbaMatrixArray(presentationTimeUs);
updateCompositeTransformationMatrixAndVisiblePolygon(presentationTimeUs); updateCompositeTransformationMatrixAndVisiblePolygon(presentationTimeUs);
if (visiblePolygon.size() < 3) { if (visiblePolygon.size() < 3) {
@ -444,17 +445,17 @@ import java.util.List;
GLES20.GL_TRIANGLE_FAN, /* first= */ 0, /* count= */ visiblePolygon.size()); GLES20.GL_TRIANGLE_FAN, /* first= */ 0, /* count= */ visiblePolygon.size());
GlUtil.checkGlError(); GlUtil.checkGlError();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }

View File

@ -16,7 +16,7 @@
package androidx.media3.effect; package androidx.media3.effect;
import android.content.Context; import android.content.Context;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
@ -40,7 +40,7 @@ public final class OverlayEffect implements GlEffect {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return new OverlayShaderProgram(context, useHdr, overlays); return new OverlayShaderProgram(context, useHdr, overlays);
} }
} }

View File

@ -21,7 +21,7 @@ import android.content.Context;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.Matrix; import android.opengl.Matrix;
import android.util.Pair; import android.util.Pair;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram; import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@ -49,11 +49,11 @@ import com.google.common.collect.ImmutableList;
* @param context The {@link Context}. * @param context The {@link Context}.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be * @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709. * in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files. * @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/ */
public OverlayShaderProgram( public OverlayShaderProgram(
Context context, boolean useHdr, ImmutableList<TextureOverlay> overlays) Context context, boolean useHdr, ImmutableList<TextureOverlay> overlays)
throws FrameProcessingException { throws VideoFrameProcessingException {
super(useHdr); super(useHdr);
checkArgument(!useHdr, "OverlayShaderProgram does not support HDR colors yet."); checkArgument(!useHdr, "OverlayShaderProgram does not support HDR colors yet.");
// The maximum number of samplers allowed in a single GL program is 16. // The maximum number of samplers allowed in a single GL program is 16.
@ -70,7 +70,7 @@ import com.google.common.collect.ImmutableList;
glProgram = glProgram =
new GlProgram(createVertexShader(overlays.size()), createFragmentShader(overlays.size())); new GlProgram(createVertexShader(overlays.size()), createFragmentShader(overlays.size()));
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
glProgram.setBufferAttribute( glProgram.setBufferAttribute(
@ -91,7 +91,8 @@ import com.google.common.collect.ImmutableList;
} }
@Override @Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException { public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try { try {
glProgram.use(); glProgram.use();
if (!overlays.isEmpty()) { if (!overlays.isEmpty()) {
@ -155,17 +156,17 @@ import com.google.common.collect.ImmutableList;
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4); GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
GlUtil.checkGlError(); GlUtil.checkGlError();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs); throw new VideoFrameProcessingException(e, presentationTimeUs);
} }
} }
@Override @Override
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
super.release(); super.release();
try { try {
glProgram.delete(); glProgram.delete();
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }

View File

@ -19,7 +19,7 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context; import android.content.Context;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -92,7 +92,7 @@ public class RgbFilter implements RgbMatrix {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
checkForConsistentHdrSetting(useHdr); checkForConsistentHdrSetting(useHdr);
return RgbMatrix.super.toGlShaderProgram(context, useHdr); return RgbMatrix.super.toGlShaderProgram(context, useHdr);
} }

View File

@ -17,7 +17,7 @@
package androidx.media3.effect; package androidx.media3.effect;
import android.content.Context; import android.content.Context;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
@ -41,7 +41,7 @@ public interface RgbMatrix extends GlEffect {
@Override @Override
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
return MatrixShaderProgram.create( return MatrixShaderProgram.create(
context, context,
/* matrixTransformations= */ ImmutableList.of(), /* matrixTransformations= */ ImmutableList.of(),

View File

@ -24,7 +24,7 @@ import android.graphics.Bitmap;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLUtils; import android.opengl.GLUtils;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
@ -150,13 +150,13 @@ public class SingleColorLut implements ColorLut {
@Override @Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr) public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException { throws VideoFrameProcessingException {
checkState(!useHdr, "HDR is currently not supported."); checkState(!useHdr, "HDR is currently not supported.");
try { try {
lutTextureId = storeLutAsTexture(lut); lutTextureId = storeLutAsTexture(lut);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException("Could not store the LUT as a texture.", e); throw new VideoFrameProcessingException("Could not store the LUT as a texture.", e);
} }
return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr); return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr);

View File

@ -18,7 +18,7 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import androidx.annotation.CallSuper; import androidx.annotation.CallSuper;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
@ -61,7 +61,7 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
this.useHdr = useHdr; this.useHdr = useHdr;
inputListener = new InputListener() {}; inputListener = new InputListener() {};
outputListener = new OutputListener() {}; outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {}; errorListener = (videoFrameProcessingException) -> {};
errorListenerExecutor = MoreExecutors.directExecutor(); errorListenerExecutor = MoreExecutors.directExecutor();
} }
@ -74,9 +74,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
* @param inputWidth The input width, in pixels. * @param inputWidth The input width, in pixels.
* @param inputHeight The input height, in pixels. * @param inputHeight The input height, in pixels.
* @return The output width and height of frames processed through {@link #drawFrame(int, long)}. * @return The output width and height of frames processed through {@link #drawFrame(int, long)}.
* @throws FrameProcessingException If an error occurs while configuring. * @throws VideoFrameProcessingException If an error occurs while configuring.
*/ */
public abstract Size configure(int inputWidth, int inputHeight) throws FrameProcessingException; public abstract Size configure(int inputWidth, int inputHeight)
throws VideoFrameProcessingException;
/** /**
* Draws one frame. * Draws one frame.
@ -90,10 +91,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
* *
* @param inputTexId Identifier of a 2D OpenGL texture containing the input frame. * @param inputTexId Identifier of a 2D OpenGL texture containing the input frame.
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds. * @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame. * @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/ */
public abstract void drawFrame(int inputTexId, long presentationTimeUs) public abstract void drawFrame(int inputTexId, long presentationTimeUs)
throws FrameProcessingException; throws VideoFrameProcessingException;
@Override @Override
public final void setInputListener(InputListener inputListener) { public final void setInputListener(InputListener inputListener) {
@ -134,19 +135,19 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
drawFrame(inputTexture.texId, presentationTimeUs); drawFrame(inputTexture.texId, presentationTimeUs);
inputListener.onInputFrameProcessed(inputTexture); inputListener.onInputFrameProcessed(inputTexture);
outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs); outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
} catch (FrameProcessingException | GlUtil.GlException | RuntimeException e) { } catch (VideoFrameProcessingException | GlUtil.GlException | RuntimeException e) {
errorListenerExecutor.execute( errorListenerExecutor.execute(
() -> () ->
errorListener.onFrameProcessingError( errorListener.onError(
e instanceof FrameProcessingException e instanceof VideoFrameProcessingException
? (FrameProcessingException) e ? (VideoFrameProcessingException) e
: new FrameProcessingException(e))); : new VideoFrameProcessingException(e)));
} }
} }
@EnsuresNonNull("outputTexture") @EnsuresNonNull("outputTexture")
private void configureOutputTexture(int inputWidth, int inputHeight) private void configureOutputTexture(int inputWidth, int inputHeight)
throws GlUtil.GlException, FrameProcessingException { throws GlUtil.GlException, VideoFrameProcessingException {
this.inputWidth = inputWidth; this.inputWidth = inputWidth;
this.inputHeight = inputHeight; this.inputHeight = inputHeight;
Size outputSize = configure(inputWidth, inputHeight); Size outputSize = configure(inputWidth, inputHeight);
@ -184,12 +185,12 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
@Override @Override
@CallSuper @CallSuper
public void release() throws FrameProcessingException { public void release() throws VideoFrameProcessingException {
if (outputTexture != null) { if (outputTexture != null) {
try { try {
GlUtil.deleteTexture(outputTexture.texId); GlUtil.deleteTexture(outputTexture.texId);
} catch (GlUtil.GlException e) { } catch (GlUtil.GlException e) {
throw new FrameProcessingException(e); throw new VideoFrameProcessingException(e);
} }
} }
} }

View File

@ -15,7 +15,7 @@
*/ */
package androidx.media3.effect; package androidx.media3.effect;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
@ -26,9 +26,9 @@ public abstract class TextureOverlay {
* Returns the overlay texture identifier displayed at the specified timestamp. * Returns the overlay texture identifier displayed at the specified timestamp.
* *
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds. * @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame. * @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/ */
public abstract int getTextureId(long presentationTimeUs) throws FrameProcessingException; public abstract int getTextureId(long presentationTimeUs) throws VideoFrameProcessingException;
// This method is required to find the size of a texture given a texture identifier using OpenGL // This method is required to find the size of a texture given a texture identifier using OpenGL
// ES 2.0. OpenGL ES 3.1 can do this with glGetTexLevelParameteriv(). // ES 2.0. OpenGL ES 3.1 can do this with glGetTexLevelParameteriv().

View File

@ -15,14 +15,14 @@
*/ */
package androidx.media3.effect; package androidx.media3.effect;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
/** /**
* Interface for tasks that may throw a {@link GlUtil.GlException} or {@link * Interface for tasks that may throw a {@link GlUtil.GlException} or {@link
* FrameProcessingException}. * VideoFrameProcessingException}.
*/ */
/* package */ interface FrameProcessingTask { /* package */ interface VideoFrameProcessingTask {
/** Runs the task. */ /** Runs the task. */
void run() throws FrameProcessingException, GlUtil.GlException; void run() throws VideoFrameProcessingException, GlUtil.GlException;
} }

View File

@ -19,8 +19,8 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
import androidx.annotation.GuardedBy; import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.FrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import java.util.ArrayDeque; import java.util.ArrayDeque;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
@ -29,36 +29,36 @@ import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.RejectedExecutionException;
/** /**
* Wrapper around a single thread {@link ExecutorService} for executing {@link FrameProcessingTask} * Wrapper around a single thread {@link ExecutorService} for executing {@link
* instances. * VideoFrameProcessingTask} instances.
* *
* <p>Public methods can be called from any thread. * <p>Public methods can be called from any thread.
* *
* <p>The wrapper handles calling {@link * <p>The wrapper handles calling {@link
* FrameProcessor.Listener#onFrameProcessingError(FrameProcessingException)} for errors that occur * VideoFrameProcessor.Listener#onError(VideoFrameProcessingException)} for errors that occur during
* during these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed * these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed to be
* to be non-recoverable, so the {@code FrameProcessingTaskExecutor} should be released if an error * non-recoverable, so the {@code VideoFrameProcessingTaskExecutor} should be released if an error
* occurs. * occurs.
* *
* <p>{@linkplain #submitWithHighPriority(FrameProcessingTask) High priority tasks} are always * <p>{@linkplain #submitWithHighPriority(VideoFrameProcessingTask) High priority tasks} are always
* executed before {@linkplain #submit(FrameProcessingTask) default priority tasks}. Tasks with * executed before {@linkplain #submit(VideoFrameProcessingTask) default priority tasks}. Tasks with
* equal priority are executed in FIFO order. * equal priority are executed in FIFO order.
*/ */
/* package */ final class FrameProcessingTaskExecutor { /* package */ final class VideoFrameProcessingTaskExecutor {
private final ExecutorService singleThreadExecutorService; private final ExecutorService singleThreadExecutorService;
private final FrameProcessor.Listener listener; private final VideoFrameProcessor.Listener listener;
private final Object lock; private final Object lock;
@GuardedBy("lock") @GuardedBy("lock")
private final ArrayDeque<FrameProcessingTask> highPriorityTasks; private final ArrayDeque<VideoFrameProcessingTask> highPriorityTasks;
@GuardedBy("lock") @GuardedBy("lock")
private boolean shouldCancelTasks; private boolean shouldCancelTasks;
/** Creates a new instance. */ /** Creates a new instance. */
public FrameProcessingTaskExecutor( public VideoFrameProcessingTaskExecutor(
ExecutorService singleThreadExecutorService, FrameProcessor.Listener listener) { ExecutorService singleThreadExecutorService, VideoFrameProcessor.Listener listener) {
this.singleThreadExecutorService = singleThreadExecutorService; this.singleThreadExecutorService = singleThreadExecutorService;
this.listener = listener; this.listener = listener;
lock = new Object(); lock = new Object();
@ -66,11 +66,11 @@ import java.util.concurrent.RejectedExecutionException;
} }
/** /**
* Submits the given {@link FrameProcessingTask} to be executed after all pending tasks have * Submits the given {@link VideoFrameProcessingTask} to be executed after all pending tasks have
* completed. * completed.
*/ */
@SuppressWarnings("FutureReturnValueIgnored") @SuppressWarnings("FutureReturnValueIgnored")
public void submit(FrameProcessingTask task) { public void submit(VideoFrameProcessingTask task) {
@Nullable RejectedExecutionException executionException = null; @Nullable RejectedExecutionException executionException = null;
synchronized (lock) { synchronized (lock) {
if (shouldCancelTasks) { if (shouldCancelTasks) {
@ -89,13 +89,13 @@ import java.util.concurrent.RejectedExecutionException;
} }
/** /**
* Submits the given {@link FrameProcessingTask} to be executed after the currently running task * Submits the given {@link VideoFrameProcessingTask} to be executed after the currently running
* and all previously submitted high-priority tasks have completed. * task and all previously submitted high-priority tasks have completed.
* *
* <p>Tasks that were previously {@linkplain #submit(FrameProcessingTask) submitted} without * <p>Tasks that were previously {@linkplain #submit(VideoFrameProcessingTask) submitted} without
* high-priority and have not started executing will be executed after this task is complete. * high-priority and have not started executing will be executed after this task is complete.
*/ */
public void submitWithHighPriority(FrameProcessingTask task) { public void submitWithHighPriority(VideoFrameProcessingTask task) {
synchronized (lock) { synchronized (lock) {
if (shouldCancelTasks) { if (shouldCancelTasks) {
return; return;
@ -111,7 +111,7 @@ import java.util.concurrent.RejectedExecutionException;
/** /**
* Flushes all scheduled tasks. * Flushes all scheduled tasks.
* *
* <p>During flush, the {@code FrameProcessingTaskExecutor} ignores the {@linkplain #submit * <p>During flush, the {@code VideoFrameProcessingTaskExecutor} ignores the {@linkplain #submit
* submission of new tasks}. The tasks that are submitted before flushing are either executed or * submission of new tasks}. The tasks that are submitted before flushing are either executed or
* canceled when this method returns. * canceled when this method returns.
*/ */
@ -137,12 +137,12 @@ import java.util.concurrent.RejectedExecutionException;
/** /**
* Cancels remaining tasks, runs the given release task, and shuts down the background thread. * Cancels remaining tasks, runs the given release task, and shuts down the background thread.
* *
* @param releaseTask A {@link FrameProcessingTask} to execute before shutting down the background * @param releaseTask A {@link VideoFrameProcessingTask} to execute before shutting down the
* thread. * background thread.
* @param releaseWaitTimeMs How long to wait for the release task to terminate, in milliseconds. * @param releaseWaitTimeMs How long to wait for the release task to terminate, in milliseconds.
* @throws InterruptedException If interrupted while releasing resources. * @throws InterruptedException If interrupted while releasing resources.
*/ */
public void release(FrameProcessingTask releaseTask, long releaseWaitTimeMs) public void release(VideoFrameProcessingTask releaseTask, long releaseWaitTimeMs)
throws InterruptedException { throws InterruptedException {
synchronized (lock) { synchronized (lock) {
shouldCancelTasks = true; shouldCancelTasks = true;
@ -153,16 +153,16 @@ import java.util.concurrent.RejectedExecutionException;
singleThreadExecutorService.shutdown(); singleThreadExecutorService.shutdown();
try { try {
if (!singleThreadExecutorService.awaitTermination(releaseWaitTimeMs, MILLISECONDS)) { if (!singleThreadExecutorService.awaitTermination(releaseWaitTimeMs, MILLISECONDS)) {
listener.onFrameProcessingError(new FrameProcessingException("Release timed out")); listener.onError(new VideoFrameProcessingException("Release timed out"));
} }
releaseFuture.get(); releaseFuture.get();
} catch (ExecutionException e) { } catch (ExecutionException e) {
listener.onFrameProcessingError(new FrameProcessingException(e)); listener.onError(new VideoFrameProcessingException(e));
} }
} }
private Future<?> wrapTaskAndSubmitToExecutorService( private Future<?> wrapTaskAndSubmitToExecutorService(
FrameProcessingTask defaultPriorityTask, boolean isFlushOrReleaseTask) { VideoFrameProcessingTask defaultPriorityTask, boolean isFlushOrReleaseTask) {
return singleThreadExecutorService.submit( return singleThreadExecutorService.submit(
() -> { () -> {
try { try {
@ -172,7 +172,7 @@ import java.util.concurrent.RejectedExecutionException;
} }
} }
@Nullable FrameProcessingTask nextHighPriorityTask; @Nullable VideoFrameProcessingTask nextHighPriorityTask;
while (true) { while (true) {
synchronized (lock) { synchronized (lock) {
// Lock only polling to prevent blocking the public method calls. // Lock only polling to prevent blocking the public method calls.
@ -199,6 +199,6 @@ import java.util.concurrent.RejectedExecutionException;
} }
shouldCancelTasks = true; shouldCancelTasks = true;
} }
listener.onFrameProcessingError(FrameProcessingException.from(exception)); listener.onError(VideoFrameProcessingException.from(exception));
} }
} }

View File

@ -18,7 +18,7 @@ package androidx.media3.effect;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import androidx.media3.common.FrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.After; import org.junit.After;
@ -30,20 +30,22 @@ import org.junit.runner.RunWith;
public final class ChainingGlShaderProgramListenerTest { public final class ChainingGlShaderProgramListenerTest {
private static final long EXECUTOR_WAIT_TIME_MS = 100; private static final long EXECUTOR_WAIT_TIME_MS = 100;
private final FrameProcessor.Listener mockFrameProcessorListener = private final VideoFrameProcessor.Listener mockFrameProcessorListener =
mock(FrameProcessor.Listener.class); mock(VideoFrameProcessor.Listener.class);
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor = private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
new FrameProcessingTaskExecutor( new VideoFrameProcessingTaskExecutor(
Util.newSingleThreadExecutor("Test"), mockFrameProcessorListener); Util.newSingleThreadExecutor("Test"), mockFrameProcessorListener);
private final GlShaderProgram mockProducingGlShaderProgram = mock(GlShaderProgram.class); private final GlShaderProgram mockProducingGlShaderProgram = mock(GlShaderProgram.class);
private final GlShaderProgram mockConsumingGlShaderProgram = mock(GlShaderProgram.class); private final GlShaderProgram mockConsumingGlShaderProgram = mock(GlShaderProgram.class);
private final ChainingGlShaderProgramListener chainingGlShaderProgramListener = private final ChainingGlShaderProgramListener chainingGlShaderProgramListener =
new ChainingGlShaderProgramListener( new ChainingGlShaderProgramListener(
mockProducingGlShaderProgram, mockConsumingGlShaderProgram, frameProcessingTaskExecutor); mockProducingGlShaderProgram,
mockConsumingGlShaderProgram,
videoFrameProcessingTaskExecutor);
@After @After
public void release() throws InterruptedException { public void release() throws InterruptedException {
frameProcessingTaskExecutor.release(/* releaseTask= */ () -> {}, EXECUTOR_WAIT_TIME_MS); videoFrameProcessingTaskExecutor.release(/* releaseTask= */ () -> {}, EXECUTOR_WAIT_TIME_MS);
} }
@Test @Test

View File

@ -53,11 +53,11 @@ import androidx.media3.common.DrmInitData;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.FrameInfo; import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackException; import androidx.media3.common.PlaybackException;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoSize; import androidx.media3.common.VideoSize;
import androidx.media3.common.util.Log; import androidx.media3.common.util.Log;
import androidx.media3.common.util.MediaFormatUtil; import androidx.media3.common.util.MediaFormatUtil;
@ -148,7 +148,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private final Context context; private final Context context;
private final VideoFrameReleaseHelper frameReleaseHelper; private final VideoFrameReleaseHelper frameReleaseHelper;
private final EventDispatcher eventDispatcher; private final EventDispatcher eventDispatcher;
private final FrameProcessorManager frameProcessorManager; private final VideoFrameProcessorManager videoFrameProcessorManager;
private final long allowedJoiningTimeMs; private final long allowedJoiningTimeMs;
private final int maxDroppedFramesToNotify; private final int maxDroppedFramesToNotify;
private final boolean deviceNeedsNoPostProcessWorkaround; private final boolean deviceNeedsNoPostProcessWorkaround;
@ -352,7 +352,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
this.context = context.getApplicationContext(); this.context = context.getApplicationContext();
frameReleaseHelper = new VideoFrameReleaseHelper(this.context); frameReleaseHelper = new VideoFrameReleaseHelper(this.context);
eventDispatcher = new EventDispatcher(eventHandler, eventListener); eventDispatcher = new EventDispatcher(eventHandler, eventListener);
frameProcessorManager = new FrameProcessorManager(frameReleaseHelper, /* renderer= */ this); videoFrameProcessorManager =
new VideoFrameProcessorManager(frameReleaseHelper, /* renderer= */ this);
deviceNeedsNoPostProcessWorkaround = deviceNeedsNoPostProcessWorkaround(); deviceNeedsNoPostProcessWorkaround = deviceNeedsNoPostProcessWorkaround();
joiningDeadlineMs = C.TIME_UNSET; joiningDeadlineMs = C.TIME_UNSET;
scalingMode = C.VIDEO_SCALING_MODE_DEFAULT; scalingMode = C.VIDEO_SCALING_MODE_DEFAULT;
@ -563,8 +564,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override @Override
protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
super.onPositionReset(positionUs, joining); super.onPositionReset(positionUs, joining);
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
frameProcessorManager.flush(); videoFrameProcessorManager.flush();
} }
clearRenderedFirstFrame(); clearRenderedFirstFrame();
frameReleaseHelper.onPositionReset(); frameReleaseHelper.onPositionReset();
@ -581,8 +582,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override @Override
public boolean isEnded() { public boolean isEnded() {
boolean isEnded = super.isEnded(); boolean isEnded = super.isEnded();
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
isEnded &= frameProcessorManager.releasedLastFrame(); isEnded &= videoFrameProcessorManager.releasedLastFrame();
} }
return isEnded; return isEnded;
} }
@ -590,7 +591,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override @Override
public boolean isReady() { public boolean isReady() {
if (super.isReady() if (super.isReady()
&& (!frameProcessorManager.isEnabled() || frameProcessorManager.isReady()) && (!videoFrameProcessorManager.isEnabled() || videoFrameProcessorManager.isReady())
&& (renderedFirstFrameAfterReset && (renderedFirstFrameAfterReset
|| (placeholderSurface != null && displaySurface == placeholderSurface) || (placeholderSurface != null && displaySurface == placeholderSurface)
|| getCodec() == null || getCodec() == null
@ -650,8 +651,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
try { try {
super.onReset(); super.onReset();
} finally { } finally {
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
frameProcessorManager.reset(); videoFrameProcessorManager.reset();
} }
if (placeholderSurface != null) { if (placeholderSurface != null) {
releasePlaceholderSurface(); releasePlaceholderSurface();
@ -691,14 +692,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
case MSG_SET_VIDEO_EFFECTS: case MSG_SET_VIDEO_EFFECTS:
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
List<Effect> videoEffects = (List<Effect>) checkNotNull(message); List<Effect> videoEffects = (List<Effect>) checkNotNull(message);
frameProcessorManager.setVideoEffects(videoEffects); videoFrameProcessorManager.setVideoEffects(videoEffects);
break; break;
case MSG_SET_VIDEO_OUTPUT_RESOLUTION: case MSG_SET_VIDEO_OUTPUT_RESOLUTION:
Size outputResolution = (Size) checkNotNull(message); Size outputResolution = (Size) checkNotNull(message);
if (outputResolution.getWidth() != 0 if (outputResolution.getWidth() != 0
&& outputResolution.getHeight() != 0 && outputResolution.getHeight() != 0
&& displaySurface != null) { && displaySurface != null) {
frameProcessorManager.setOutputSurfaceInfo(displaySurface, outputResolution); videoFrameProcessorManager.setOutputSurfaceInfo(displaySurface, outputResolution);
} }
break; break;
case MSG_SET_AUDIO_ATTRIBUTES: case MSG_SET_AUDIO_ATTRIBUTES:
@ -737,7 +738,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@State int state = getState(); @State int state = getState();
@Nullable MediaCodecAdapter codec = getCodec(); @Nullable MediaCodecAdapter codec = getCodec();
if (codec != null && !frameProcessorManager.isEnabled()) { if (codec != null && !videoFrameProcessorManager.isEnabled()) {
if (Util.SDK_INT >= 23 && displaySurface != null && !codecNeedsSetOutputSurfaceWorkaround) { if (Util.SDK_INT >= 23 && displaySurface != null && !codecNeedsSetOutputSurfaceWorkaround) {
setOutputSurfaceV23(codec, displaySurface); setOutputSurfaceV23(codec, displaySurface);
} else { } else {
@ -754,17 +755,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// Set joining deadline to report MediaCodecVideoRenderer is ready. // Set joining deadline to report MediaCodecVideoRenderer is ready.
setJoiningDeadlineMs(); setJoiningDeadlineMs();
} }
// When FrameProcessorManager is enabled, set FrameProcessorManager's display surface and an // When VideoFrameProcessorManager is enabled, set VideoFrameProcessorManager's display
// unknown size. // surface and an unknown size.
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
frameProcessorManager.setOutputSurfaceInfo(displaySurface, Size.UNKNOWN); videoFrameProcessorManager.setOutputSurfaceInfo(displaySurface, Size.UNKNOWN);
} }
} else { } else {
// The display surface has been removed. // The display surface has been removed.
clearReportedVideoSize(); clearReportedVideoSize();
clearRenderedFirstFrame(); clearRenderedFirstFrame();
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
frameProcessorManager.clearOutputSurfaceInfo(); videoFrameProcessorManager.clearOutputSurfaceInfo();
} }
} }
} else if (displaySurface != null && displaySurface != placeholderSurface) { } else if (displaySurface != null && displaySurface != placeholderSurface) {
@ -817,16 +818,16 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
displaySurface = placeholderSurface; displaySurface = placeholderSurface;
} }
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
mediaFormat = frameProcessorManager.amendMediaFormatKeys(mediaFormat); mediaFormat = videoFrameProcessorManager.amendMediaFormatKeys(mediaFormat);
} }
return MediaCodecAdapter.Configuration.createForVideoDecoding( return MediaCodecAdapter.Configuration.createForVideoDecoding(
codecInfo, codecInfo,
mediaFormat, mediaFormat,
format, format,
frameProcessorManager.isEnabled() videoFrameProcessorManager.isEnabled()
? frameProcessorManager.getInputSurface() ? videoFrameProcessorManager.getInputSurface()
: displaySurface, : displaySurface,
crypto); crypto);
} }
@ -856,8 +857,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override @Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
super.render(positionUs, elapsedRealtimeUs); super.render(positionUs, elapsedRealtimeUs);
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
frameProcessorManager.releaseProcessedFrames(positionUs, elapsedRealtimeUs); videoFrameProcessorManager.releaseProcessedFrames(positionUs, elapsedRealtimeUs);
} }
} }
@ -964,8 +965,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@CallSuper @CallSuper
@Override @Override
protected void onReadyToInitializeCodec(Format format) throws ExoPlaybackException { protected void onReadyToInitializeCodec(Format format) throws ExoPlaybackException {
if (!frameProcessorManager.isEnabled()) { if (!videoFrameProcessorManager.isEnabled()) {
frameProcessorManager.maybeEnable(format); videoFrameProcessorManager.maybeEnable(format);
} }
} }
@ -982,7 +983,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
if (Util.SDK_INT >= 23 && tunneling) { if (Util.SDK_INT >= 23 && tunneling) {
tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(checkNotNull(getCodec())); tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(checkNotNull(getCodec()));
} }
frameProcessorManager.onCodecInitialized(name); videoFrameProcessorManager.onCodecInitialized(name);
} }
@Override @Override
@ -1070,16 +1071,16 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
height = rotatedHeight; height = rotatedHeight;
pixelWidthHeightRatio = 1 / pixelWidthHeightRatio; pixelWidthHeightRatio = 1 / pixelWidthHeightRatio;
} }
} else if (!frameProcessorManager.isEnabled()) { } else if (!videoFrameProcessorManager.isEnabled()) {
// Neither the codec nor the FrameProcessor applies the rotation. // Neither the codec nor the VideoFrameProcessor applies the rotation.
unappliedRotationDegrees = format.rotationDegrees; unappliedRotationDegrees = format.rotationDegrees;
} }
decodedVideoSize = decodedVideoSize =
new VideoSize(width, height, unappliedRotationDegrees, pixelWidthHeightRatio); new VideoSize(width, height, unappliedRotationDegrees, pixelWidthHeightRatio);
frameReleaseHelper.onFormatChanged(format.frameRate); frameReleaseHelper.onFormatChanged(format.frameRate);
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
frameProcessorManager.setInputFormat( videoFrameProcessorManager.setInputFormat(
format format
.buildUpon() .buildUpon()
.setWidth(width) .setWidth(width)
@ -1142,7 +1143,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
if (bufferPresentationTimeUs != lastBufferPresentationTimeUs) { if (bufferPresentationTimeUs != lastBufferPresentationTimeUs) {
if (!frameProcessorManager.isEnabled()) { if (!videoFrameProcessorManager.isEnabled()) {
frameReleaseHelper.onNextFrame(bufferPresentationTimeUs); frameReleaseHelper.onNextFrame(bufferPresentationTimeUs);
} // else, update the frameReleaseHelper when releasing the processed frames. } // else, update the frameReleaseHelper when releasing the processed frames.
this.lastBufferPresentationTimeUs = bufferPresentationTimeUs; this.lastBufferPresentationTimeUs = bufferPresentationTimeUs;
@ -1180,9 +1181,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
boolean forceRenderOutputBuffer = shouldForceRender(positionUs, earlyUs); boolean forceRenderOutputBuffer = shouldForceRender(positionUs, earlyUs);
if (forceRenderOutputBuffer) { if (forceRenderOutputBuffer) {
boolean notifyFrameMetaDataListener; boolean notifyFrameMetaDataListener;
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
notifyFrameMetaDataListener = false; notifyFrameMetaDataListener = false;
if (!frameProcessorManager.maybeRegisterFrame(format, presentationTimeUs, isLastBuffer)) { if (!videoFrameProcessorManager.maybeRegisterFrame(
format, presentationTimeUs, isLastBuffer)) {
return false; return false;
} }
} else { } else {
@ -1204,7 +1206,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// Apply a timestamp adjustment, if there is one. // Apply a timestamp adjustment, if there is one.
long adjustedReleaseTimeNs = frameReleaseHelper.adjustReleaseTime(unadjustedFrameReleaseTimeNs); long adjustedReleaseTimeNs = frameReleaseHelper.adjustReleaseTime(unadjustedFrameReleaseTimeNs);
if (!frameProcessorManager.isEnabled()) { if (!videoFrameProcessorManager.isEnabled()) {
earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000; earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
} // else, use the unadjusted earlyUs in previewing use cases. } // else, use the unadjusted earlyUs in previewing use cases.
@ -1222,9 +1224,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return true; return true;
} }
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
frameProcessorManager.releaseProcessedFrames(positionUs, elapsedRealtimeUs); videoFrameProcessorManager.releaseProcessedFrames(positionUs, elapsedRealtimeUs);
if (frameProcessorManager.maybeRegisterFrame(format, presentationTimeUs, isLastBuffer)) { if (videoFrameProcessorManager.maybeRegisterFrame(format, presentationTimeUs, isLastBuffer)) {
renderOutputBufferNow( renderOutputBufferNow(
codec, codec,
format, format,
@ -1457,8 +1459,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
droppedSourceBufferCount, /* droppedDecoderBufferCount= */ buffersInCodecCount); droppedSourceBufferCount, /* droppedDecoderBufferCount= */ buffersInCodecCount);
} }
flushOrReinitializeCodec(); flushOrReinitializeCodec();
if (frameProcessorManager.isEnabled()) { if (videoFrameProcessorManager.isEnabled()) {
frameProcessorManager.flush(); videoFrameProcessorManager.flush();
} }
return true; return true;
} }
@ -1513,11 +1515,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
boolean notifyFrameMetadataListener) { boolean notifyFrameMetadataListener) {
// In previewing mode, use the presentation time as release time so that the SurfaceTexture is // In previewing mode, use the presentation time as release time so that the SurfaceTexture is
// accompanied by the rendered frame's presentation time. Setting a realtime based release time // accompanied by the rendered frame's presentation time. Setting a realtime based release time
// is only relevant when rendering to a SurfaceView (that is when not using FrameProcessor) for // is only relevant when rendering to a SurfaceView (that is when not using VideoFrameProcessor)
// better frame release. In previewing mode MediaCodec renders to FrameProcessor's input // for better frame release. In previewing mode MediaCodec renders to VideoFrameProcessor's
// surface, which is not a SurfaceView. // input surface, which is not a SurfaceView.
long releaseTimeNs = long releaseTimeNs =
frameProcessorManager.isEnabled() videoFrameProcessorManager.isEnabled()
? (presentationTimeUs + getOutputStreamOffsetUs()) * 1000 ? (presentationTimeUs + getOutputStreamOffsetUs()) * 1000
: System.nanoTime(); : System.nanoTime();
if (notifyFrameMetadataListener) { if (notifyFrameMetadataListener) {
@ -1534,9 +1536,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* Renders the output buffer with the specified index. This method is only called if the platform * Renders the output buffer with the specified index. This method is only called if the platform
* API version of the device is less than 21. * API version of the device is less than 21.
* *
* <p>When frame processing is {@linkplain FrameProcessorManager#isEnabled()} enabled}, this * <p>When video frame processing is {@linkplain VideoFrameProcessorManager#isEnabled()} enabled},
* method renders to {@link FrameProcessorManager}'s {@linkplain * this method renders to {@link VideoFrameProcessorManager}'s {@linkplain
* FrameProcessorManager#getInputSurface() input surface}. * VideoFrameProcessorManager#getInputSurface() input surface}.
* *
* @param codec The codec that owns the output buffer. * @param codec The codec that owns the output buffer.
* @param index The index of the output buffer to drop. * @param index The index of the output buffer to drop.
@ -1548,7 +1550,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
TraceUtil.endSection(); TraceUtil.endSection();
decoderCounters.renderedOutputBufferCount++; decoderCounters.renderedOutputBufferCount++;
consecutiveDroppedFrameCount = 0; consecutiveDroppedFrameCount = 0;
if (!frameProcessorManager.isEnabled()) { if (!videoFrameProcessorManager.isEnabled()) {
lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000; lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000;
maybeNotifyVideoSizeChanged(decodedVideoSize); maybeNotifyVideoSizeChanged(decodedVideoSize);
maybeNotifyRenderedFirstFrame(); maybeNotifyRenderedFirstFrame();
@ -1559,9 +1561,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* Renders the output buffer with the specified index. This method is only called if the platform * Renders the output buffer with the specified index. This method is only called if the platform
* API version of the device is 21 or later. * API version of the device is 21 or later.
* *
* <p>When frame processing is {@linkplain FrameProcessorManager#isEnabled()} enabled}, this * <p>When video frame processing is {@linkplain VideoFrameProcessorManager#isEnabled()} enabled},
* method renders to {@link FrameProcessorManager}'s {@linkplain * this method renders to {@link VideoFrameProcessorManager}'s {@linkplain
* FrameProcessorManager#getInputSurface() input surface}. * VideoFrameProcessorManager#getInputSurface() input surface}.
* *
* @param codec The codec that owns the output buffer. * @param codec The codec that owns the output buffer.
* @param index The index of the output buffer to drop. * @param index The index of the output buffer to drop.
@ -1576,7 +1578,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
TraceUtil.endSection(); TraceUtil.endSection();
decoderCounters.renderedOutputBufferCount++; decoderCounters.renderedOutputBufferCount++;
consecutiveDroppedFrameCount = 0; consecutiveDroppedFrameCount = 0;
if (!frameProcessorManager.isEnabled()) { if (!videoFrameProcessorManager.isEnabled()) {
lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000; lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000;
maybeNotifyVideoSizeChanged(decodedVideoSize); maybeNotifyVideoSizeChanged(decodedVideoSize);
maybeNotifyRenderedFirstFrame(); maybeNotifyRenderedFirstFrame();
@ -1834,8 +1836,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return new MediaCodecVideoDecoderException(cause, codecInfo, displaySurface); return new MediaCodecVideoDecoderException(cause, codecInfo, displaySurface);
} }
/** Manages {@link FrameProcessor} interactions. */ /** Manages {@link VideoFrameProcessor} interactions. */
private static final class FrameProcessorManager { private static final class VideoFrameProcessorManager {
/** The threshold for releasing a processed frame. */ /** The threshold for releasing a processed frame. */
private static final long EARLY_THRESHOLD_US = 50_000; private static final long EARLY_THRESHOLD_US = 50_000;
@ -1848,7 +1850,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private final ArrayDeque<Pair<Long, Format>> pendingFrameFormats; private final ArrayDeque<Pair<Long, Format>> pendingFrameFormats;
private @MonotonicNonNull Handler handler; private @MonotonicNonNull Handler handler;
@Nullable private FrameProcessor frameProcessor; @Nullable private VideoFrameProcessor videoFrameProcessor;
@Nullable private CopyOnWriteArrayList<Effect> videoEffects; @Nullable private CopyOnWriteArrayList<Effect> videoEffects;
@Nullable private Format inputFormat; @Nullable private Format inputFormat;
/** /**
@ -1858,16 +1860,18 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Nullable private Pair<Surface, Size> currentSurfaceAndSize; @Nullable private Pair<Surface, Size> currentSurfaceAndSize;
private int frameProcessorMaxPendingFrameCount; private int videoFrameProcessorMaxPendingFrameCount;
private boolean canEnableFrameProcessing; private boolean canEnableFrameProcessing;
/** /**
* Whether the last frame of the current stream is decoded and registered to {@link * Whether the last frame of the current stream is decoded and registered to {@link
* FrameProcessor}. * VideoFrameProcessor}.
*/ */
private boolean registeredLastFrame; private boolean registeredLastFrame;
/** Whether the last frame of the current stream is processed by the {@link FrameProcessor}. */ /**
* Whether the last frame of the current stream is processed by the {@link VideoFrameProcessor}.
*/
private boolean processedLastFrame; private boolean processedLastFrame;
/** Whether the last frame of the current stream is released to the output {@link Surface}. */ /** Whether the last frame of the current stream is released to the output {@link Surface}. */
@ -1880,14 +1884,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private long pendingOutputSizeChangeNotificationTimeUs; private long pendingOutputSizeChangeNotificationTimeUs;
/** Creates a new instance. */ /** Creates a new instance. */
public FrameProcessorManager( public VideoFrameProcessorManager(
VideoFrameReleaseHelper frameReleaseHelper, VideoFrameReleaseHelper frameReleaseHelper,
@UnderInitialization MediaCodecVideoRenderer renderer) { @UnderInitialization MediaCodecVideoRenderer renderer) {
this.frameReleaseHelper = frameReleaseHelper; this.frameReleaseHelper = frameReleaseHelper;
this.renderer = renderer; this.renderer = renderer;
processedFramesTimestampsUs = new ArrayDeque<>(); processedFramesTimestampsUs = new ArrayDeque<>();
pendingFrameFormats = new ArrayDeque<>(); pendingFrameFormats = new ArrayDeque<>();
frameProcessorMaxPendingFrameCount = C.LENGTH_UNSET; videoFrameProcessorMaxPendingFrameCount = C.LENGTH_UNSET;
canEnableFrameProcessing = true; canEnableFrameProcessing = true;
lastCodecBufferPresentationTimestampUs = C.TIME_UNSET; lastCodecBufferPresentationTimestampUs = C.TIME_UNSET;
processedFrameSize = VideoSize.UNKNOWN; processedFrameSize = VideoSize.UNKNOWN;
@ -1904,30 +1908,32 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
this.videoEffects.addAll(videoEffects); this.videoEffects.addAll(videoEffects);
} }
/** Returns whether frame processing is enabled. */ /** Returns whether video frame processing is enabled. */
public boolean isEnabled() { public boolean isEnabled() {
return frameProcessor != null; return videoFrameProcessor != null;
} }
/** Returns whether {@code FrameProcessorManager} is ready to accept input frames. */ /** Returns whether {@code VideoFrameProcessorManager} is ready to accept input frames. */
public boolean isReady() { public boolean isReady() {
return currentSurfaceAndSize == null || !currentSurfaceAndSize.second.equals(Size.UNKNOWN); return currentSurfaceAndSize == null || !currentSurfaceAndSize.second.equals(Size.UNKNOWN);
} }
/** Whether the {@link FrameProcessor} has released the last frame in the current stream. */ /**
* Whether the {@link VideoFrameProcessor} has released the last frame in the current stream.
*/
public boolean releasedLastFrame() { public boolean releasedLastFrame() {
return releasedLastFrame; return releasedLastFrame;
} }
/** /**
* Flushes the {@link FrameProcessor}. * Flushes the {@link VideoFrameProcessor}.
* *
* <p>Caller must ensure frame processing {@linkplain #isEnabled() is enabled} before calling * <p>Caller must ensure video frame processing {@linkplain #isEnabled() is enabled} before
* this method. * calling this method.
*/ */
public void flush() { public void flush() {
checkStateNotNull(frameProcessor); checkStateNotNull(videoFrameProcessor);
frameProcessor.flush(); videoFrameProcessor.flush();
processedFramesTimestampsUs.clear(); processedFramesTimestampsUs.clear();
handler.removeCallbacksAndMessages(/* token= */ null); handler.removeCallbacksAndMessages(/* token= */ null);
@ -1939,14 +1945,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
/** /**
* Tries to enable frame processing. * Tries to enable video frame processing.
* *
* <p>Caller must ensure frame processing {@linkplain #isEnabled() is not enabled} before * <p>Caller must ensure video frame processing {@linkplain #isEnabled() is not enabled} before
* calling this method. * calling this method.
* *
* @param inputFormat The {@link Format} that is input into the {@link FrameProcessor}. * @param inputFormat The {@link Format} that is input into the {@link VideoFrameProcessor}.
* @return Whether frame processing is enabled. * @return Whether video frame processing is enabled.
* @throws ExoPlaybackException When enabling the {@link FrameProcessor} failed. * @throws ExoPlaybackException When enabling the {@link VideoFrameProcessor} failed.
*/ */
@CanIgnoreReturnValue @CanIgnoreReturnValue
public boolean maybeEnable(Format inputFormat) throws ExoPlaybackException { public boolean maybeEnable(Format inputFormat) throws ExoPlaybackException {
@ -1981,11 +1987,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// Insert as the first effect as if the decoder has applied the rotation. // Insert as the first effect as if the decoder has applied the rotation.
videoEffects.add( videoEffects.add(
/* index= */ 0, /* index= */ 0,
FrameProcessorAccessor.createRotationEffect(inputFormat.rotationDegrees)); VideoFrameProcessorAccessor.createRotationEffect(inputFormat.rotationDegrees));
} }
frameProcessor = videoFrameProcessor =
FrameProcessorAccessor.getFrameProcessorFactory() VideoFrameProcessorAccessor.getFrameProcessorFactory()
.create( .create(
renderer.context, renderer.context,
checkNotNull(videoEffects), checkNotNull(videoEffects),
@ -1995,19 +2001,20 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/* isInputTextureExternal= */ true, /* isInputTextureExternal= */ true,
/* releaseFramesAutomatically= */ false, /* releaseFramesAutomatically= */ false,
/* executor= */ handler::post, /* executor= */ handler::post,
new FrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
@Override @Override
public void onOutputSizeChanged(int width, int height) { public void onOutputSizeChanged(int width, int height) {
@Nullable Format inputFormat = FrameProcessorManager.this.inputFormat; @Nullable Format inputFormat = VideoFrameProcessorManager.this.inputFormat;
checkStateNotNull(inputFormat); checkStateNotNull(inputFormat);
// TODO(b/264889146): Handle Effect that changes output size based on pts. // TODO(b/264889146): Handle Effect that changes output size based on pts.
processedFrameSize = processedFrameSize =
new VideoSize( new VideoSize(
width, width,
height, height,
// FrameProcessor is configured to produce rotation free frames. // VideoFrameProcessor is configured to produce rotation free
// frames.
/* unappliedRotationDegrees= */ 0, /* unappliedRotationDegrees= */ 0,
// FrameProcessor always outputs pixelWidthHeightRatio 1. // VideoFrameProcessor always outputs pixelWidthHeightRatio 1.
/* pixelWidthHeightRatio= */ 1.f); /* pixelWidthHeightRatio= */ 1.f);
pendingOutputSizeChange = true; pendingOutputSizeChange = true;
} }
@ -2031,27 +2038,27 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
@Override @Override
public void onFrameProcessingError(FrameProcessingException exception) { public void onError(VideoFrameProcessingException exception) {
renderer.setPendingPlaybackException( renderer.setPendingPlaybackException(
renderer.createRendererException( renderer.createRendererException(
exception, exception,
inputFormat, inputFormat,
PlaybackException.ERROR_CODE_FRAME_PROCESSING_FAILED)); PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED));
} }
@Override @Override
public void onFrameProcessingEnded() { public void onEnded() {
throw new IllegalStateException(); throw new IllegalStateException();
} }
}); });
} catch (Exception e) { } catch (Exception e) {
throw renderer.createRendererException( throw renderer.createRendererException(
e, inputFormat, PlaybackException.ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED); e, inputFormat, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
} }
if (currentSurfaceAndSize != null) { if (currentSurfaceAndSize != null) {
Size outputSurfaceSize = currentSurfaceAndSize.second; Size outputSurfaceSize = currentSurfaceAndSize.second;
frameProcessor.setOutputSurfaceInfo( videoFrameProcessor.setOutputSurfaceInfo(
new SurfaceInfo( new SurfaceInfo(
currentSurfaceAndSize.first, currentSurfaceAndSize.first,
outputSurfaceSize.getWidth(), outputSurfaceSize.getWidth(),
@ -2063,20 +2070,20 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
/** /**
* Returns the {@linkplain FrameProcessor#getInputSurface input surface} of the {@link * Returns the {@linkplain VideoFrameProcessor#getInputSurface input surface} of the {@link
* FrameProcessor}. * VideoFrameProcessor}.
* *
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling * <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* this method. * calling this method.
*/ */
public Surface getInputSurface() { public Surface getInputSurface() {
return checkNotNull(frameProcessor).getInputSurface(); return checkNotNull(videoFrameProcessor).getInputSurface();
} }
/** /**
* Sets the output surface info. * Sets the output surface info.
* *
* @param outputSurface The {@link Surface} to which {@link FrameProcessor} outputs. * @param outputSurface The {@link Surface} to which {@link VideoFrameProcessor} outputs.
* @param outputResolution The {@link Size} of the output resolution. * @param outputResolution The {@link Size} of the output resolution.
*/ */
public void setOutputSurfaceInfo(Surface outputSurface, Size outputResolution) { public void setOutputSurfaceInfo(Surface outputSurface, Size outputResolution) {
@ -2087,7 +2094,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
currentSurfaceAndSize = Pair.create(outputSurface, outputResolution); currentSurfaceAndSize = Pair.create(outputSurface, outputResolution);
if (isEnabled()) { if (isEnabled()) {
checkNotNull(frameProcessor) checkNotNull(videoFrameProcessor)
.setOutputSurfaceInfo( .setOutputSurfaceInfo(
new SurfaceInfo( new SurfaceInfo(
outputSurface, outputResolution.getWidth(), outputResolution.getHeight())); outputSurface, outputResolution.getWidth(), outputResolution.getHeight()));
@ -2097,22 +2104,22 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/** /**
* Clears the set output surface info. * Clears the set output surface info.
* *
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling * <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* this method. * calling this method.
*/ */
public void clearOutputSurfaceInfo() { public void clearOutputSurfaceInfo() {
checkNotNull(frameProcessor).setOutputSurfaceInfo(null); checkNotNull(videoFrameProcessor).setOutputSurfaceInfo(null);
currentSurfaceAndSize = null; currentSurfaceAndSize = null;
} }
/** /**
* Sets the input surface info. * Sets the input surface info.
* *
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling * <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* this method. * calling this method.
*/ */
public void setInputFormat(Format inputFormat) { public void setInputFormat(Format inputFormat) {
checkNotNull(frameProcessor) checkNotNull(videoFrameProcessor)
.setInputFrameInfo( .setInputFrameInfo(
new FrameInfo.Builder(inputFormat.width, inputFormat.height) new FrameInfo.Builder(inputFormat.width, inputFormat.height)
.setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio) .setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
@ -2127,7 +2134,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
} }
/** Sets the necessary {@link MediaFormat} keys for frame processing. */ /** Sets the necessary {@link MediaFormat} keys for video frame processing. */
@SuppressWarnings("InlinedApi") @SuppressWarnings("InlinedApi")
public MediaFormat amendMediaFormatKeys(MediaFormat mediaFormat) { public MediaFormat amendMediaFormatKeys(MediaFormat mediaFormat) {
if (Util.SDK_INT >= 29 if (Util.SDK_INT >= 29
@ -2140,31 +2147,32 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/** /**
* Must be called when the codec is initialized. * Must be called when the codec is initialized.
* *
* <p>Sets the {@code frameProcessorMaxPendingFrameCount} based on the {@code codecName}. * <p>Sets the {@code videoFrameProcessorMaxPendingFrameCount} based on the {@code codecName}.
*/ */
public void onCodecInitialized(String codecName) { public void onCodecInitialized(String codecName) {
frameProcessorMaxPendingFrameCount = videoFrameProcessorMaxPendingFrameCount =
Util.getMaxPendingFramesCountForMediaCodecEncoders( Util.getMaxPendingFramesCountForMediaCodecEncoders(
renderer.context, codecName, /* requestedHdrToneMapping= */ false); renderer.context, codecName, /* requestedHdrToneMapping= */ false);
} }
/** /**
* Tries to {@linkplain FrameProcessor#registerInputFrame register an input frame}. * Tries to {@linkplain VideoFrameProcessor#registerInputFrame register an input frame}.
* *
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling * <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* this method. * calling this method.
* *
* @param format The {@link Format} associated with the frame. * @param format The {@link Format} associated with the frame.
* @param isLastBuffer Whether the buffer is the last from the decoder to register. * @param isLastBuffer Whether the buffer is the last from the decoder to register.
* @return Whether {@link MediaCodec} should render the frame to {@link FrameProcessor}. * @return Whether {@link MediaCodec} should render the frame to {@link VideoFrameProcessor}.
*/ */
public boolean maybeRegisterFrame( public boolean maybeRegisterFrame(
Format format, long presentationTimestampUs, boolean isLastBuffer) { Format format, long presentationTimestampUs, boolean isLastBuffer) {
checkStateNotNull(frameProcessor); checkStateNotNull(videoFrameProcessor);
checkState(frameProcessorMaxPendingFrameCount != C.LENGTH_UNSET); checkState(videoFrameProcessorMaxPendingFrameCount != C.LENGTH_UNSET);
checkState(!registeredLastFrame); checkState(!registeredLastFrame);
if (frameProcessor.getPendingInputFrameCount() < frameProcessorMaxPendingFrameCount) { if (videoFrameProcessor.getPendingInputFrameCount()
frameProcessor.registerInputFrame(); < videoFrameProcessorMaxPendingFrameCount) {
videoFrameProcessor.registerInputFrame();
if (currentFrameFormat == null) { if (currentFrameFormat == null) {
currentFrameFormat = Pair.create(presentationTimestampUs, format); currentFrameFormat = Pair.create(presentationTimestampUs, format);
@ -2185,11 +2193,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/** /**
* Releases the processed frames to the {@linkplain #setOutputSurfaceInfo output surface}. * Releases the processed frames to the {@linkplain #setOutputSurfaceInfo output surface}.
* *
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling * <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* this method. * calling this method.
*/ */
public void releaseProcessedFrames(long positionUs, long elapsedRealtimeUs) { public void releaseProcessedFrames(long positionUs, long elapsedRealtimeUs) {
checkStateNotNull(frameProcessor); checkStateNotNull(videoFrameProcessor);
while (!processedFramesTimestampsUs.isEmpty()) { while (!processedFramesTimestampsUs.isEmpty()) {
boolean isStarted = renderer.getState() == STATE_STARTED; boolean isStarted = renderer.getState() == STATE_STARTED;
long bufferPresentationTimeUs = checkNotNull(processedFramesTimestampsUs.peek()); long bufferPresentationTimeUs = checkNotNull(processedFramesTimestampsUs.peek());
@ -2205,7 +2213,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
boolean shouldReleaseFrameImmediately = renderer.shouldForceRender(positionUs, earlyUs); boolean shouldReleaseFrameImmediately = renderer.shouldForceRender(positionUs, earlyUs);
if (shouldReleaseFrameImmediately) { if (shouldReleaseFrameImmediately) {
releaseProcessedFrameInternal( releaseProcessedFrameInternal(
FrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY, isLastFrame); VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY, isLastFrame);
break; break;
} else if (!isStarted || positionUs == renderer.initialPositionUs) { } else if (!isStarted || positionUs == renderer.initialPositionUs) {
return; return;
@ -2224,9 +2232,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
earlyUs = (adjustedFrameReleaseTimeNs - System.nanoTime()) / 1000; earlyUs = (adjustedFrameReleaseTimeNs - System.nanoTime()) / 1000;
// TODO(b/238302341) Handle very late buffers and drop to key frame. Need to flush // TODO(b/238302341) Handle very late buffers and drop to key frame. Need to flush
// FrameProcessor input frames in this case. // VideoFrameProcessor input frames in this case.
if (renderer.shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs, isLastFrame)) { if (renderer.shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs, isLastFrame)) {
releaseProcessedFrameInternal(FrameProcessor.DROP_OUTPUT_FRAME, isLastFrame); releaseProcessedFrameInternal(VideoFrameProcessor.DROP_OUTPUT_FRAME, isLastFrame);
continue; continue;
} }
@ -2249,12 +2257,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/** /**
* Releases the resources. * Releases the resources.
* *
* <p>Caller must ensure frame processing {@linkplain #isEnabled() is not enabled} before * <p>Caller must ensure video frame processing {@linkplain #isEnabled() is not enabled} before
* calling this method. * calling this method.
*/ */
public void reset() { public void reset() {
checkNotNull(frameProcessor).release(); checkNotNull(videoFrameProcessor).release();
frameProcessor = null; videoFrameProcessor = null;
if (handler != null) { if (handler != null) {
handler.removeCallbacksAndMessages(/* token= */ null); handler.removeCallbacksAndMessages(/* token= */ null);
} }
@ -2266,11 +2274,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
private void releaseProcessedFrameInternal(long releaseTimeNs, boolean isLastFrame) { private void releaseProcessedFrameInternal(long releaseTimeNs, boolean isLastFrame) {
checkStateNotNull(frameProcessor); checkStateNotNull(videoFrameProcessor);
frameProcessor.releaseOutputFrame(releaseTimeNs); videoFrameProcessor.releaseOutputFrame(releaseTimeNs);
processedFramesTimestampsUs.remove(); processedFramesTimestampsUs.remove();
renderer.lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000; renderer.lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000;
if (releaseTimeNs != FrameProcessor.DROP_OUTPUT_FRAME) { if (releaseTimeNs != VideoFrameProcessor.DROP_OUTPUT_FRAME) {
renderer.maybeNotifyRenderedFirstFrame(); renderer.maybeNotifyRenderedFirstFrame();
} }
if (isLastFrame) { if (isLastFrame) {
@ -2278,12 +2286,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
} }
} }
private static final class FrameProcessorAccessor { private static final class VideoFrameProcessorAccessor {
private static @MonotonicNonNull Constructor<?> scaleToFitTransformationBuilderConstructor; private static @MonotonicNonNull Constructor<?> scaleToFitTransformationBuilderConstructor;
private static @MonotonicNonNull Method setRotationMethod; private static @MonotonicNonNull Method setRotationMethod;
private static @MonotonicNonNull Method buildScaleToFitTransformationMethod; private static @MonotonicNonNull Method buildScaleToFitTransformationMethod;
private static @MonotonicNonNull Constructor<?> frameProcessorFactorConstructor; private static @MonotonicNonNull Constructor<?> videoFrameProcessorFactoryConstructor;
public static Effect createRotationEffect(float rotationDegrees) throws Exception { public static Effect createRotationEffect(float rotationDegrees) throws Exception {
prepare(); prepare();
@ -2292,16 +2300,16 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return (Effect) checkNotNull(buildScaleToFitTransformationMethod.invoke(builder)); return (Effect) checkNotNull(buildScaleToFitTransformationMethod.invoke(builder));
} }
public static FrameProcessor.Factory getFrameProcessorFactory() throws Exception { public static VideoFrameProcessor.Factory getFrameProcessorFactory() throws Exception {
prepare(); prepare();
return (FrameProcessor.Factory) frameProcessorFactorConstructor.newInstance(); return (VideoFrameProcessor.Factory) videoFrameProcessorFactoryConstructor.newInstance();
} }
@EnsuresNonNull({ @EnsuresNonNull({
"ScaleToFitEffectBuilder", "ScaleToFitEffectBuilder",
"SetRotationMethod", "SetRotationMethod",
"SetRotationMethod", "SetRotationMethod",
"FrameProcessorFactoryClass" "VideoFrameProcessorFactoryClass"
}) })
private static void prepare() throws Exception { private static void prepare() throws Exception {
if (scaleToFitTransformationBuilderConstructor == null if (scaleToFitTransformationBuilderConstructor == null
@ -2316,9 +2324,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
buildScaleToFitTransformationMethod = buildScaleToFitTransformationMethod =
scaleToFitTransformationBuilderClass.getMethod("build"); scaleToFitTransformationBuilderClass.getMethod("build");
} }
if (frameProcessorFactorConstructor == null) { if (videoFrameProcessorFactoryConstructor == null) {
frameProcessorFactorConstructor = videoFrameProcessorFactoryConstructor =
Class.forName("androidx.media3.effect.GlEffectsFrameProcessor$Factory") Class.forName("androidx.media3.effect.DefaultVideoFrameProcessor$Factory")
.getConstructor(); .getConstructor();
} }
} }

View File

@ -33,9 +33,9 @@ import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.FrameInfo; import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
@ -44,18 +44,18 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** A test runner for {@link FrameProcessor} tests. */ /** A test runner for {@link VideoFrameProcessor} tests. */
@UnstableApi @UnstableApi
@RequiresApi(19) @RequiresApi(19)
public final class FrameProcessorTestRunner { public final class VideoFrameProcessorTestRunner {
/** A builder for {@link FrameProcessorTestRunner} instances. */ /** A builder for {@link VideoFrameProcessorTestRunner} instances. */
public static final class Builder { public static final class Builder {
/** The ratio of width over height, for each pixel in a frame. */ /** The ratio of width over height, for each pixel in a frame. */
private static final float DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO = 1; private static final float DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO = 1;
private @MonotonicNonNull String testId; private @MonotonicNonNull String testId;
private FrameProcessor.@MonotonicNonNull Factory frameProcessorFactory; private VideoFrameProcessor.@MonotonicNonNull Factory videoFrameProcessorFactory;
private @MonotonicNonNull String videoAssetPath; private @MonotonicNonNull String videoAssetPath;
private @MonotonicNonNull String outputFileLabel; private @MonotonicNonNull String outputFileLabel;
private @MonotonicNonNull ImmutableList<Effect> effects; private @MonotonicNonNull ImmutableList<Effect> effects;
@ -82,13 +82,14 @@ public final class FrameProcessorTestRunner {
} }
/** /**
* Sets the {@link FrameProcessor.Factory}. * Sets the {@link VideoFrameProcessor.Factory}.
* *
* <p>This is a required value. * <p>This is a required value.
*/ */
@CanIgnoreReturnValue @CanIgnoreReturnValue
public Builder setFrameProcessorFactory(FrameProcessor.Factory frameProcessorFactory) { public Builder setVideoFrameProcessorFactory(
this.frameProcessorFactory = frameProcessorFactory; VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
return this; return this;
} }
@ -171,7 +172,7 @@ public final class FrameProcessorTestRunner {
return this; return this;
} }
/** /**
* Sets the input track type. See {@link FrameProcessor.Factory#create}. * Sets the input track type. See {@link VideoFrameProcessor.Factory#create}.
* *
* <p>The default value is {@code true}. * <p>The default value is {@code true}.
*/ */
@ -181,14 +182,14 @@ public final class FrameProcessorTestRunner {
return this; return this;
} }
public FrameProcessorTestRunner build() throws FrameProcessingException { public VideoFrameProcessorTestRunner build() throws VideoFrameProcessingException {
checkStateNotNull(testId, "testId must be set."); checkStateNotNull(testId, "testId must be set.");
checkStateNotNull(frameProcessorFactory, "frameProcessorFactory must be set."); checkStateNotNull(videoFrameProcessorFactory, "videoFrameProcessorFactory must be set.");
checkStateNotNull(videoAssetPath, "videoAssetPath must be set."); checkStateNotNull(videoAssetPath, "videoAssetPath must be set.");
return new FrameProcessorTestRunner( return new VideoFrameProcessorTestRunner(
testId, testId,
frameProcessorFactory, videoFrameProcessorFactory,
videoAssetPath, videoAssetPath,
outputFileLabel == null ? "" : outputFileLabel, outputFileLabel == null ? "" : outputFileLabel,
effects == null ? ImmutableList.of() : effects, effects == null ? ImmutableList.of() : effects,
@ -200,25 +201,25 @@ public final class FrameProcessorTestRunner {
} }
/** /**
* Time to wait for the decoded frame to populate the {@link FrameProcessor} instance's input * Time to wait for the decoded frame to populate the {@link VideoFrameProcessor} instance's input
* surface and the {@link FrameProcessor} to finish processing the frame, in milliseconds. * surface and the {@link VideoFrameProcessor} to finish processing the frame, in milliseconds.
*/ */
private static final int FRAME_PROCESSING_WAIT_MS = 5000; private static final int VIDEO_FRAME_PROCESSING_WAIT_MS = 5000;
private final String testId; private final String testId;
private final String videoAssetPath; private final String videoAssetPath;
private final String outputFileLabel; private final String outputFileLabel;
private final float pixelWidthHeightRatio; private final float pixelWidthHeightRatio;
private final AtomicReference<FrameProcessingException> frameProcessingException; private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException;
private final FrameProcessor frameProcessor; private final VideoFrameProcessor videoFrameProcessor;
private volatile @MonotonicNonNull ImageReader outputImageReader; private volatile @MonotonicNonNull ImageReader outputImageReader;
private volatile boolean frameProcessingEnded; private volatile boolean videoFrameProcessingEnded;
private FrameProcessorTestRunner( private VideoFrameProcessorTestRunner(
String testId, String testId,
FrameProcessor.Factory frameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
String videoAssetPath, String videoAssetPath,
String outputFileLabel, String outputFileLabel,
ImmutableList<Effect> effects, ImmutableList<Effect> effects,
@ -226,15 +227,15 @@ public final class FrameProcessorTestRunner {
ColorInfo inputColorInfo, ColorInfo inputColorInfo,
ColorInfo outputColorInfo, ColorInfo outputColorInfo,
boolean isInputTextureExternal) boolean isInputTextureExternal)
throws FrameProcessingException { throws VideoFrameProcessingException {
this.testId = testId; this.testId = testId;
this.videoAssetPath = videoAssetPath; this.videoAssetPath = videoAssetPath;
this.outputFileLabel = outputFileLabel; this.outputFileLabel = outputFileLabel;
this.pixelWidthHeightRatio = pixelWidthHeightRatio; this.pixelWidthHeightRatio = pixelWidthHeightRatio;
frameProcessingException = new AtomicReference<>(); videoFrameProcessingException = new AtomicReference<>();
frameProcessor = videoFrameProcessor =
frameProcessorFactory.create( videoFrameProcessorFactory.create(
getApplicationContext(), getApplicationContext(),
effects, effects,
DebugViewProvider.NONE, DebugViewProvider.NONE,
@ -243,13 +244,13 @@ public final class FrameProcessorTestRunner {
isInputTextureExternal, isInputTextureExternal,
/* releaseFramesAutomatically= */ true, /* releaseFramesAutomatically= */ true,
MoreExecutors.directExecutor(), MoreExecutors.directExecutor(),
new FrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
@Override @Override
public void onOutputSizeChanged(int width, int height) { public void onOutputSizeChanged(int width, int height) {
outputImageReader = outputImageReader =
ImageReader.newInstance( ImageReader.newInstance(
width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1); width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1);
checkNotNull(frameProcessor) checkNotNull(videoFrameProcessor)
.setOutputSurfaceInfo( .setOutputSurfaceInfo(
new SurfaceInfo(outputImageReader.getSurface(), width, height)); new SurfaceInfo(outputImageReader.getSurface(), width, height));
} }
@ -260,13 +261,13 @@ public final class FrameProcessorTestRunner {
} }
@Override @Override
public void onFrameProcessingError(FrameProcessingException exception) { public void onError(VideoFrameProcessingException exception) {
frameProcessingException.set(exception); videoFrameProcessingException.set(exception);
} }
@Override @Override
public void onFrameProcessingEnded() { public void onEnded() {
frameProcessingEnded = true; videoFrameProcessingEnded = true;
} }
}); });
} }
@ -277,13 +278,13 @@ public final class FrameProcessorTestRunner {
new DecodeOneFrameUtil.Listener() { new DecodeOneFrameUtil.Listener() {
@Override @Override
public void onContainerExtracted(MediaFormat mediaFormat) { public void onContainerExtracted(MediaFormat mediaFormat) {
frameProcessor.setInputFrameInfo( videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder( new FrameInfo.Builder(
mediaFormat.getInteger(MediaFormat.KEY_WIDTH), mediaFormat.getInteger(MediaFormat.KEY_WIDTH),
mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)) mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
.setPixelWidthHeightRatio(pixelWidthHeightRatio) .setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build()); .build());
frameProcessor.registerInputFrame(); videoFrameProcessor.registerInputFrame();
} }
@Override @Override
@ -291,36 +292,36 @@ public final class FrameProcessorTestRunner {
// Do nothing. // Do nothing.
} }
}, },
frameProcessor.getInputSurface()); videoFrameProcessor.getInputSurface());
return endFrameProcessingAndGetImage(); return endFrameProcessingAndGetImage();
} }
public Bitmap processImageFrameAndEnd(Bitmap inputBitmap) throws Exception { public Bitmap processImageFrameAndEnd(Bitmap inputBitmap) throws Exception {
frameProcessor.setInputFrameInfo( videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(inputBitmap.getWidth(), inputBitmap.getHeight()) new FrameInfo.Builder(inputBitmap.getWidth(), inputBitmap.getHeight())
.setPixelWidthHeightRatio(pixelWidthHeightRatio) .setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build()); .build());
frameProcessor.queueInputBitmap(inputBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1); videoFrameProcessor.queueInputBitmap(inputBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
return endFrameProcessingAndGetImage(); return endFrameProcessingAndGetImage();
} }
private Bitmap endFrameProcessingAndGetImage() throws Exception { private Bitmap endFrameProcessingAndGetImage() throws Exception {
frameProcessor.signalEndOfInput(); videoFrameProcessor.signalEndOfInput();
Thread.sleep(FRAME_PROCESSING_WAIT_MS); Thread.sleep(VIDEO_FRAME_PROCESSING_WAIT_MS);
assertThat(frameProcessingEnded).isTrue(); assertThat(videoFrameProcessingEnded).isTrue();
assertThat(frameProcessingException.get()).isNull(); assertThat(videoFrameProcessingException.get()).isNull();
Image frameProcessorOutputImage = checkNotNull(outputImageReader).acquireLatestImage(); Image videoFrameProcessorOutputImage = checkNotNull(outputImageReader).acquireLatestImage();
Bitmap actualBitmap = createArgb8888BitmapFromRgba8888Image(frameProcessorOutputImage); Bitmap actualBitmap = createArgb8888BitmapFromRgba8888Image(videoFrameProcessorOutputImage);
frameProcessorOutputImage.close(); videoFrameProcessorOutputImage.close();
maybeSaveTestBitmapToCacheDirectory(testId, /* bitmapLabel= */ outputFileLabel, actualBitmap); maybeSaveTestBitmapToCacheDirectory(testId, /* bitmapLabel= */ outputFileLabel, actualBitmap);
return actualBitmap; return actualBitmap;
} }
public void release() { public void release() {
if (frameProcessor != null) { if (videoFrameProcessor != null) {
frameProcessor.release(); videoFrameProcessor.release();
} }
} }
} }

View File

@ -30,9 +30,9 @@ import androidx.media3.common.C;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.effect.GlEffectsFrameProcessor; import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.test.utils.DecodeOneFrameUtil; import androidx.media3.test.utils.DecodeOneFrameUtil;
import androidx.media3.test.utils.FrameProcessorTestRunner; import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After; import org.junit.After;
@ -40,10 +40,10 @@ import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
/** /**
* Instrumentation pixel-test for HDR to SDR tone-mapping via {@link GlEffectsFrameProcessor}. * Instrumentation pixel-test for HDR to SDR tone-mapping via {@link DefaultVideoFrameProcessor}.
* *
* <p>Uses a {@link GlEffectsFrameProcessor} to process one frame, and checks that the actual output * <p>Uses a {@link DefaultVideoFrameProcessor} to process one frame, and checks that the actual
* matches expected output, either from a golden file or from another edit. * output matches expected output, either from a golden file or from another edit.
*/ */
// TODO(b/263395272): Move this test to effects/mh tests. // TODO(b/263395272): Move this test to effects/mh tests.
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
@ -75,12 +75,12 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
"OpenGL-based HDR to SDR tone mapping is unsupported below API 29."; "OpenGL-based HDR to SDR tone mapping is unsupported below API 29.";
private static final String SKIP_REASON_NO_YUV = "Device lacks YUV extension support."; private static final String SKIP_REASON_NO_YUV = "Device lacks YUV extension support.";
private @MonotonicNonNull FrameProcessorTestRunner frameProcessorTestRunner; private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
@After @After
public void release() { public void release() {
if (frameProcessorTestRunner != null) { if (videoFrameProcessorTestRunner != null) {
frameProcessorTestRunner.release(); videoFrameProcessorTestRunner.release();
} }
} }
@ -114,7 +114,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.setColorRange(C.COLOR_RANGE_LIMITED) .setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2) .setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build(); .build();
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setVideoAssetPath(INPUT_HLG_MP4_ASSET_STRING) .setVideoAssetPath(INPUT_HLG_MP4_ASSET_STRING)
.setInputColorInfo(hlgColor) .setInputColorInfo(hlgColor)
@ -124,7 +124,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
Bitmap actualBitmap; Bitmap actualBitmap;
try { try {
actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
} catch (UnsupportedOperationException e) { } catch (UnsupportedOperationException e) {
if (e.getMessage() != null if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) { && e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
@ -177,7 +177,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.setColorRange(C.COLOR_RANGE_LIMITED) .setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2) .setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build(); .build();
frameProcessorTestRunner = videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId) getDefaultFrameProcessorTestRunnerBuilder(testId)
.setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING) .setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING)
.setInputColorInfo(pqColor) .setInputColorInfo(pqColor)
@ -187,7 +187,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
Bitmap actualBitmap; Bitmap actualBitmap;
try { try {
actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd(); actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
} catch (UnsupportedOperationException e) { } catch (UnsupportedOperationException e) {
if (e.getMessage() != null if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) { && e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
@ -209,10 +209,10 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.isAtMost(MAXIMUM_DEVICE_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE); .isAtMost(MAXIMUM_DEVICE_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
} }
private FrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder( private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) { String testId) {
return new FrameProcessorTestRunner.Builder() return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId) .setTestId(testId)
.setFrameProcessorFactory(new GlEffectsFrameProcessor.Factory()); .setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory());
} }
} }

View File

@ -16,11 +16,11 @@
package androidx.media3.transformer; package androidx.media3.transformer;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.MediaItem; import androidx.media3.common.MediaItem;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.audio.AudioProcessor; import androidx.media3.common.audio.AudioProcessor;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import androidx.media3.effect.GlEffectsFrameProcessor; import androidx.media3.effect.DefaultVideoFrameProcessor;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.util.List; import java.util.List;
@ -45,19 +45,19 @@ public final class Effects {
*/ */
public final ImmutableList<Effect> videoEffects; public final ImmutableList<Effect> videoEffects;
/** /**
* The {@link FrameProcessor.Factory} for the {@link FrameProcessor} to use when applying the * The {@link VideoFrameProcessor.Factory} for the {@link VideoFrameProcessor} to use when
* {@code videoEffects} to the video frames. * applying the {@code videoEffects} to the video frames.
*/ */
public final FrameProcessor.Factory frameProcessorFactory; public final VideoFrameProcessor.Factory videoFrameProcessorFactory;
/** /**
* Creates an instance using a {@link GlEffectsFrameProcessor.Factory}. * Creates an instance using a {@link DefaultVideoFrameProcessor.Factory}.
* *
* <p>This is equivalent to calling {@link Effects#Effects(List, List, FrameProcessor.Factory)} * <p>This is equivalent to calling {@link Effects#Effects(List, List,
* with a {@link GlEffectsFrameProcessor.Factory}. * VideoFrameProcessor.Factory)} with a {@link DefaultVideoFrameProcessor.Factory}.
*/ */
public Effects(List<AudioProcessor> audioProcessors, List<Effect> videoEffects) { public Effects(List<AudioProcessor> audioProcessors, List<Effect> videoEffects) {
this(audioProcessors, videoEffects, new GlEffectsFrameProcessor.Factory()); this(audioProcessors, videoEffects, new DefaultVideoFrameProcessor.Factory());
} }
/** /**
@ -65,14 +65,14 @@ public final class Effects {
* *
* @param audioProcessors The {@link #audioProcessors}. * @param audioProcessors The {@link #audioProcessors}.
* @param videoEffects The {@link #videoEffects}. * @param videoEffects The {@link #videoEffects}.
* @param frameProcessorFactory The {@link #frameProcessorFactory}. * @param videoFrameProcessorFactory The {@link #videoFrameProcessorFactory}.
*/ */
public Effects( public Effects(
List<AudioProcessor> audioProcessors, List<AudioProcessor> audioProcessors,
List<Effect> videoEffects, List<Effect> videoEffects,
FrameProcessor.Factory frameProcessorFactory) { VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.audioProcessors = ImmutableList.copyOf(audioProcessors); this.audioProcessors = ImmutableList.copyOf(audioProcessors);
this.videoEffects = ImmutableList.copyOf(videoEffects); this.videoEffects = ImmutableList.copyOf(videoEffects);
this.frameProcessorFactory = frameProcessorFactory; this.videoFrameProcessorFactory = videoFrameProcessorFactory;
} }
} }

View File

@ -21,8 +21,8 @@ import android.os.SystemClock;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.FrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.FrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.audio.AudioProcessor.AudioFormat; import androidx.media3.common.audio.AudioProcessor.AudioFormat;
import androidx.media3.common.util.Clock; import androidx.media3.common.util.Clock;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
@ -66,7 +66,7 @@ public final class TransformationException extends Exception {
ERROR_CODE_ENCODER_INIT_FAILED, ERROR_CODE_ENCODER_INIT_FAILED,
ERROR_CODE_ENCODING_FAILED, ERROR_CODE_ENCODING_FAILED,
ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED, ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED,
ERROR_CODE_FRAME_PROCESSING_FAILED, ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED,
ERROR_CODE_AUDIO_PROCESSING_FAILED, ERROR_CODE_AUDIO_PROCESSING_FAILED,
ERROR_CODE_MUXING_FAILED, ERROR_CODE_MUXING_FAILED,
}) })
@ -151,8 +151,8 @@ public final class TransformationException extends Exception {
// Video editing errors (5xxx). // Video editing errors (5xxx).
/** Caused by a frame processing failure. */ /** Caused by a video frame processing failure. */
public static final int ERROR_CODE_FRAME_PROCESSING_FAILED = 5001; public static final int ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED = 5001;
// Audio processing errors (6xxx). // Audio processing errors (6xxx).
@ -182,7 +182,7 @@ public final class TransformationException extends Exception {
.put("ERROR_CODE_ENCODER_INIT_FAILED", ERROR_CODE_ENCODER_INIT_FAILED) .put("ERROR_CODE_ENCODER_INIT_FAILED", ERROR_CODE_ENCODER_INIT_FAILED)
.put("ERROR_CODE_ENCODING_FAILED", ERROR_CODE_ENCODING_FAILED) .put("ERROR_CODE_ENCODING_FAILED", ERROR_CODE_ENCODING_FAILED)
.put("ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED", ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED) .put("ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED", ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED)
.put("ERROR_CODE_FRAME_PROCESSING_FAILED", ERROR_CODE_FRAME_PROCESSING_FAILED) .put("ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED", ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED)
.put("ERROR_CODE_AUDIO_PROCESSING_FAILED", ERROR_CODE_AUDIO_PROCESSING_FAILED) .put("ERROR_CODE_AUDIO_PROCESSING_FAILED", ERROR_CODE_AUDIO_PROCESSING_FAILED)
.put("ERROR_CODE_MUXING_FAILED", ERROR_CODE_MUXING_FAILED) .put("ERROR_CODE_MUXING_FAILED", ERROR_CODE_MUXING_FAILED)
.buildOrThrow(); .buildOrThrow();
@ -271,15 +271,15 @@ public final class TransformationException extends Exception {
} }
/** /**
* Creates an instance for a {@link FrameProcessor} related exception. * Creates an instance for a {@link VideoFrameProcessor} related exception.
* *
* @param cause The cause of the failure. * @param cause The cause of the failure.
* @param errorCode See {@link #errorCode}. * @param errorCode See {@link #errorCode}.
* @return The created instance. * @return The created instance.
*/ */
/* package */ static TransformationException createForFrameProcessingException( /* package */ static TransformationException createForVideoFrameProcessingException(
FrameProcessingException cause, int errorCode) { VideoFrameProcessingException cause, int errorCode) {
return new TransformationException("Frame processing error", cause, errorCode); return new TransformationException("Video frame processing error", cause, errorCode);
} }
/** /**

View File

@ -28,10 +28,10 @@ import androidx.annotation.VisibleForTesting;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.DebugViewProvider; import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.MediaItem; import androidx.media3.common.MediaItem;
import androidx.media3.common.MediaLibraryInfo; import androidx.media3.common.MediaLibraryInfo;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.audio.AudioProcessor; import androidx.media3.common.audio.AudioProcessor;
import androidx.media3.common.audio.SonicAudioProcessor; import androidx.media3.common.audio.SonicAudioProcessor;
import androidx.media3.common.util.Clock; import androidx.media3.common.util.Clock;
@ -39,7 +39,7 @@ import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.common.util.ListenerSet; import androidx.media3.common.util.ListenerSet;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.effect.GlEffectsFrameProcessor; import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.exoplayer.source.DefaultMediaSourceFactory; import androidx.media3.exoplayer.source.DefaultMediaSourceFactory;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.errorprone.annotations.CanIgnoreReturnValue;
@ -89,7 +89,7 @@ public final class Transformer {
private boolean generateSilentAudio; private boolean generateSilentAudio;
private ListenerSet<Transformer.Listener> listeners; private ListenerSet<Transformer.Listener> listeners;
private AssetLoader.@MonotonicNonNull Factory assetLoaderFactory; private AssetLoader.@MonotonicNonNull Factory assetLoaderFactory;
private FrameProcessor.Factory frameProcessorFactory; private VideoFrameProcessor.Factory videoFrameProcessorFactory;
private Codec.EncoderFactory encoderFactory; private Codec.EncoderFactory encoderFactory;
private Muxer.Factory muxerFactory; private Muxer.Factory muxerFactory;
private Looper looper; private Looper looper;
@ -106,7 +106,7 @@ public final class Transformer {
transformationRequest = new TransformationRequest.Builder().build(); transformationRequest = new TransformationRequest.Builder().build();
audioProcessors = ImmutableList.of(); audioProcessors = ImmutableList.of();
videoEffects = ImmutableList.of(); videoEffects = ImmutableList.of();
frameProcessorFactory = new GlEffectsFrameProcessor.Factory(); videoFrameProcessorFactory = new DefaultVideoFrameProcessor.Factory();
encoderFactory = new DefaultEncoderFactory.Builder(this.context).build(); encoderFactory = new DefaultEncoderFactory.Builder(this.context).build();
muxerFactory = new DefaultMuxer.Factory(); muxerFactory = new DefaultMuxer.Factory();
looper = Util.getCurrentOrMainLooper(); looper = Util.getCurrentOrMainLooper();
@ -126,7 +126,7 @@ public final class Transformer {
this.generateSilentAudio = transformer.generateSilentAudio; this.generateSilentAudio = transformer.generateSilentAudio;
this.listeners = transformer.listeners; this.listeners = transformer.listeners;
this.assetLoaderFactory = transformer.assetLoaderFactory; this.assetLoaderFactory = transformer.assetLoaderFactory;
this.frameProcessorFactory = transformer.frameProcessorFactory; this.videoFrameProcessorFactory = transformer.videoFrameProcessorFactory;
this.encoderFactory = transformer.encoderFactory; this.encoderFactory = transformer.encoderFactory;
this.muxerFactory = transformer.muxerFactory; this.muxerFactory = transformer.muxerFactory;
this.looper = transformer.looper; this.looper = transformer.looper;
@ -298,13 +298,14 @@ public final class Transformer {
} }
/** /**
* @deprecated Set the {@link FrameProcessor.Factory} in an {@link EditedMediaItem}, and pass it * @deprecated Set the {@link VideoFrameProcessor.Factory} in an {@link EditedMediaItem}, and
* to {@link #start(EditedMediaItem, String)} instead. * pass it to {@link #start(EditedMediaItem, String)} instead.
*/ */
@CanIgnoreReturnValue @CanIgnoreReturnValue
@Deprecated @Deprecated
public Builder setFrameProcessorFactory(FrameProcessor.Factory frameProcessorFactory) { public Builder setFrameProcessorFactory(
this.frameProcessorFactory = frameProcessorFactory; VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
return this; return this;
} }
@ -450,7 +451,7 @@ public final class Transformer {
generateSilentAudio, generateSilentAudio,
listeners, listeners,
assetLoaderFactory, assetLoaderFactory,
frameProcessorFactory, videoFrameProcessorFactory,
encoderFactory, encoderFactory,
muxerFactory, muxerFactory,
looper, looper,
@ -608,7 +609,7 @@ public final class Transformer {
private final boolean generateSilentAudio; private final boolean generateSilentAudio;
private final ListenerSet<Transformer.Listener> listeners; private final ListenerSet<Transformer.Listener> listeners;
private final AssetLoader.Factory assetLoaderFactory; private final AssetLoader.Factory assetLoaderFactory;
private final FrameProcessor.Factory frameProcessorFactory; private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
private final Codec.EncoderFactory encoderFactory; private final Codec.EncoderFactory encoderFactory;
private final Muxer.Factory muxerFactory; private final Muxer.Factory muxerFactory;
private final Looper looper; private final Looper looper;
@ -629,7 +630,7 @@ public final class Transformer {
boolean generateSilentAudio, boolean generateSilentAudio,
ListenerSet<Listener> listeners, ListenerSet<Listener> listeners,
AssetLoader.Factory assetLoaderFactory, AssetLoader.Factory assetLoaderFactory,
FrameProcessor.Factory frameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory, Codec.EncoderFactory encoderFactory,
Muxer.Factory muxerFactory, Muxer.Factory muxerFactory,
Looper looper, Looper looper,
@ -647,7 +648,7 @@ public final class Transformer {
this.generateSilentAudio = generateSilentAudio; this.generateSilentAudio = generateSilentAudio;
this.listeners = listeners; this.listeners = listeners;
this.assetLoaderFactory = assetLoaderFactory; this.assetLoaderFactory = assetLoaderFactory;
this.frameProcessorFactory = frameProcessorFactory; this.videoFrameProcessorFactory = videoFrameProcessorFactory;
this.encoderFactory = encoderFactory; this.encoderFactory = encoderFactory;
this.muxerFactory = muxerFactory; this.muxerFactory = muxerFactory;
this.looper = looper; this.looper = looper;
@ -844,7 +845,7 @@ public final class Transformer {
.setRemoveAudio(removeAudio) .setRemoveAudio(removeAudio)
.setRemoveVideo(removeVideo) .setRemoveVideo(removeVideo)
.setFlattenForSlowMotion(flattenForSlowMotion) .setFlattenForSlowMotion(flattenForSlowMotion)
.setEffects(new Effects(audioProcessors, videoEffects, frameProcessorFactory)) .setEffects(new Effects(audioProcessors, videoEffects, videoFrameProcessorFactory))
.build(); .build();
start(editedMediaItem, path); start(editedMediaItem, path);
} }

View File

@ -498,7 +498,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
streamOffsetUs, streamOffsetUs,
transformationRequest, transformationRequest,
firstEditedMediaItem.effects.videoEffects, firstEditedMediaItem.effects.videoEffects,
firstEditedMediaItem.effects.frameProcessorFactory, firstEditedMediaItem.effects.videoFrameProcessorFactory,
encoderFactory, encoderFactory,
muxerWrapper, muxerWrapper,
/* errorConsumer= */ this::onError, /* errorConsumer= */ this::onError,

View File

@ -37,10 +37,10 @@ import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect; import androidx.media3.common.Effect;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.FrameInfo; import androidx.media3.common.FrameInfo;
import androidx.media3.common.FrameProcessingException;
import androidx.media3.common.FrameProcessor;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.Consumer; import androidx.media3.common.util.Consumer;
import androidx.media3.common.util.Log; import androidx.media3.common.util.Log;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
@ -58,8 +58,8 @@ import org.checkerframework.dataflow.qual.Pure;
/** MIME type to use for output video if the input type is not a video. */ /** MIME type to use for output video if the input type is not a video. */
private static final String DEFAULT_OUTPUT_MIME_TYPE = MimeTypes.VIDEO_H265; private static final String DEFAULT_OUTPUT_MIME_TYPE = MimeTypes.VIDEO_H265;
private final FrameProcessor frameProcessor; private final VideoFrameProcessor videoFrameProcessor;
private final ColorInfo frameProcessorInputColor; private final ColorInfo videoFrameProcessorInputColor;
private final FrameInfo firstFrameInfo; private final FrameInfo firstFrameInfo;
private final EncoderWrapper encoderWrapper; private final EncoderWrapper encoderWrapper;
@ -67,7 +67,7 @@ import org.checkerframework.dataflow.qual.Pure;
/** /**
* The timestamp of the last buffer processed before {@linkplain * The timestamp of the last buffer processed before {@linkplain
* FrameProcessor.Listener#onFrameProcessingEnded() frame processing has ended}. * VideoFrameProcessor.Listener#onEnded() frame processing has ended}.
*/ */
private volatile long finalFramePresentationTimeUs; private volatile long finalFramePresentationTimeUs;
@ -78,7 +78,7 @@ import org.checkerframework.dataflow.qual.Pure;
long streamOffsetUs, long streamOffsetUs,
TransformationRequest transformationRequest, TransformationRequest transformationRequest,
ImmutableList<Effect> effects, ImmutableList<Effect> effects,
FrameProcessor.Factory frameProcessorFactory, VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory, Codec.EncoderFactory encoderFactory,
MuxerWrapper muxerWrapper, MuxerWrapper muxerWrapper,
Consumer<TransformationException> errorConsumer, Consumer<TransformationException> errorConsumer,
@ -122,12 +122,12 @@ import org.checkerframework.dataflow.qual.Pure;
ColorInfo encoderInputColor = encoderWrapper.getSupportedInputColor(); ColorInfo encoderInputColor = encoderWrapper.getSupportedInputColor();
// If not tone mapping using OpenGL, the decoder will output the encoderInputColor, // If not tone mapping using OpenGL, the decoder will output the encoderInputColor,
// possibly by tone mapping. // possibly by tone mapping.
frameProcessorInputColor = videoFrameProcessorInputColor =
isGlToneMapping ? checkNotNull(firstInputFormat.colorInfo) : encoderInputColor; isGlToneMapping ? checkNotNull(firstInputFormat.colorInfo) : encoderInputColor;
// For consistency with the Android platform, OpenGL tone mapping outputs colors with // For consistency with the Android platform, OpenGL tone mapping outputs colors with
// C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as // C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
// C.COLOR_TRANSFER_SDR to the encoder. // C.COLOR_TRANSFER_SDR to the encoder.
ColorInfo frameProcessorOutputColor = ColorInfo videoFrameProcessorOutputColor =
isGlToneMapping isGlToneMapping
? new ColorInfo.Builder() ? new ColorInfo.Builder()
.setColorSpace(C.COLOR_SPACE_BT709) .setColorSpace(C.COLOR_SPACE_BT709)
@ -136,23 +136,23 @@ import org.checkerframework.dataflow.qual.Pure;
.build() .build()
: encoderInputColor; : encoderInputColor;
try { try {
frameProcessor = videoFrameProcessor =
frameProcessorFactory.create( videoFrameProcessorFactory.create(
context, context,
effects, effects,
debugViewProvider, debugViewProvider,
frameProcessorInputColor, videoFrameProcessorInputColor,
frameProcessorOutputColor, videoFrameProcessorOutputColor,
MimeTypes.isVideo(firstInputFormat.sampleMimeType), MimeTypes.isVideo(firstInputFormat.sampleMimeType),
/* releaseFramesAutomatically= */ true, /* releaseFramesAutomatically= */ true,
MoreExecutors.directExecutor(), MoreExecutors.directExecutor(),
new FrameProcessor.Listener() { new VideoFrameProcessor.Listener() {
private long lastProcessedFramePresentationTimeUs; private long lastProcessedFramePresentationTimeUs;
@Override @Override
public void onOutputSizeChanged(int width, int height) { public void onOutputSizeChanged(int width, int height) {
try { try {
checkNotNull(frameProcessor) checkNotNull(videoFrameProcessor)
.setOutputSurfaceInfo(encoderWrapper.getSurfaceInfo(width, height)); .setOutputSurfaceInfo(encoderWrapper.getSurfaceInfo(width, height));
} catch (TransformationException exception) { } catch (TransformationException exception) {
errorConsumer.accept(exception); errorConsumer.accept(exception);
@ -166,14 +166,15 @@ import org.checkerframework.dataflow.qual.Pure;
} }
@Override @Override
public void onFrameProcessingError(FrameProcessingException exception) { public void onError(VideoFrameProcessingException exception) {
errorConsumer.accept( errorConsumer.accept(
TransformationException.createForFrameProcessingException( TransformationException.createForVideoFrameProcessingException(
exception, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED)); exception,
TransformationException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED));
} }
@Override @Override
public void onFrameProcessingEnded() { public void onEnded() {
VideoSamplePipeline.this.finalFramePresentationTimeUs = VideoSamplePipeline.this.finalFramePresentationTimeUs =
lastProcessedFramePresentationTimeUs; lastProcessedFramePresentationTimeUs;
try { try {
@ -183,9 +184,9 @@ import org.checkerframework.dataflow.qual.Pure;
} }
} }
}); });
} catch (FrameProcessingException e) { } catch (VideoFrameProcessingException e) {
throw TransformationException.createForFrameProcessingException( throw TransformationException.createForVideoFrameProcessingException(
e, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED); e, TransformationException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED);
} }
// The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees. // The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
int decodedWidth = int decodedWidth =
@ -206,43 +207,43 @@ import org.checkerframework.dataflow.qual.Pure;
@Override @Override
public void onMediaItemChanged( public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) { EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
frameProcessor.setInputFrameInfo( videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build()); new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build());
} }
@Override @Override
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) { public void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
frameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate); videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
} }
@Override @Override
public Surface getInputSurface() { public Surface getInputSurface() {
return frameProcessor.getInputSurface(); return videoFrameProcessor.getInputSurface();
} }
@Override @Override
public ColorInfo getExpectedInputColorInfo() { public ColorInfo getExpectedInputColorInfo() {
return frameProcessorInputColor; return videoFrameProcessorInputColor;
} }
@Override @Override
public void registerVideoFrame() { public void registerVideoFrame() {
frameProcessor.registerInputFrame(); videoFrameProcessor.registerInputFrame();
} }
@Override @Override
public int getPendingVideoFrameCount() { public int getPendingVideoFrameCount() {
return frameProcessor.getPendingInputFrameCount(); return videoFrameProcessor.getPendingInputFrameCount();
} }
@Override @Override
public void signalEndOfVideoInput() { public void signalEndOfVideoInput() {
frameProcessor.signalEndOfInput(); videoFrameProcessor.signalEndOfInput();
} }
@Override @Override
public void release() { public void release() {
frameProcessor.release(); videoFrameProcessor.release();
encoderWrapper.release(); encoderWrapper.release();
} }