Effect: Rename FrameProcessor

Rename FrameProcessor to VideoFrameProcessor, and GlEffectsFrameProcessor to
DefaultVideoFrameProcessor.

Most changes are a semi-mechanical, semi-manual find-and-replace, preserving case:
* "FrameProc" -> "VideoFrameProc" (e.g. FrameProcessor -> VideoFrameProcessor, and
   FrameProcessingException -> VideoFrameProcessingException)
* "GlEffectsVideoFrameProc" -> "DefaultVideoFrameProc" (applied after the first rule,
   so GlEffectsFrameProcessor ends up as DefaultVideoFrameProcessor)

PiperOrigin-RevId: 509887384
huangdarwin authored 2023-02-15 19:22:51 +00:00, committed by christosts
parent 3e5ae92bc6, commit 413f61b96d
53 changed files with 777 additions and 752 deletions


@@ -20,10 +20,10 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context;
import android.opengl.GLES20;
import com.google.android.exoplayer2.effect.SingleFrameGlShaderProgram;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException;
/**
@@ -59,7 +59,7 @@ import java.io.IOException;
* @param minInnerRadius The lower bound of the radius that is unaffected by the effect.
* @param maxInnerRadius The upper bound of the radius that is unaffected by the effect.
* @param outerRadius The radius after which all pixels are black.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public PeriodicVignetteShaderProgram(
Context context,
@@ -69,7 +69,7 @@ import java.io.IOException;
float minInnerRadius,
float maxInnerRadius,
float outerRadius)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
checkArgument(minInnerRadius <= maxInnerRadius);
checkArgument(maxInnerRadius <= outerRadius);
@@ -78,7 +78,7 @@ import java.io.IOException;
try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
glProgram.setFloatsUniform("uCenter", new float[] {centerX, centerY});
glProgram.setFloatsUniform("uOuterRadius", new float[] {outerRadius});
@@ -95,7 +95,8 @@ import java.io.IOException;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
@@ -107,17 +108,17 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
}


@@ -26,9 +26,9 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.effect.GlShaderProgram;
import com.google.android.exoplayer2.effect.TextureInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.LibraryLoader;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.mediapipe.components.FrameProcessor;
import com.google.mediapipe.framework.AppTextureFrame;
@@ -112,7 +112,7 @@ import java.util.concurrent.Future;
futures = new ArrayDeque<>();
inputListener = new InputListener() {};
outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {};
errorListener = (videoFrameProcessingException) -> {};
errorListenerExecutor = MoreExecutors.directExecutor();
EglManager eglManager = new EglManager(EGL14.eglGetCurrentContext());
frameProcessor =
@@ -155,7 +155,7 @@ import java.util.concurrent.Future;
frameProcessor.setAsynchronousErrorListener(
error ->
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(error))));
() -> errorListener.onError(new VideoFrameProcessingException(error))));
}
@Override
@@ -191,7 +191,7 @@ import java.util.concurrent.Future;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
}
if (acceptedFrame) {
inputListener.onInputFrameProcessed(inputTexture);
@@ -213,9 +213,7 @@ import java.util.concurrent.Future;
Thread.currentThread().interrupt();
if (errorListener != null) {
errorListenerExecutor.execute(
() ->
errorListener.onFrameProcessingError(
new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
}
}
}
@@ -254,14 +252,12 @@ import java.util.concurrent.Future;
try {
if (!singleThreadExecutorService.awaitTermination(RELEASE_WAIT_TIME_MS, MILLISECONDS)) {
errorListenerExecutor.execute(
() ->
errorListener.onFrameProcessingError(
new FrameProcessingException("Release timed out")));
() -> errorListener.onError(new VideoFrameProcessingException("Release timed out")));
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
}
frameProcessor.close();
@@ -294,11 +290,11 @@ import java.util.concurrent.Future;
futures.remove().get();
} catch (ExecutionException e) {
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(new FrameProcessingException(e)));
() -> errorListener.onError(new VideoFrameProcessingException(e)));
}
}
}


@@ -229,10 +229,10 @@ public class PlaybackException extends Exception implements Bundleable {
// Frame processing errors (7xxx).
/** Caused by a failure when initializing a {@link FrameProcessor}. */
public static final int ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED = 7000;
/** Caused by a failure when processing a frame. */
public static final int ERROR_CODE_FRAME_PROCESSING_FAILED = 7001;
/** Caused by a failure when initializing a {@link VideoFrameProcessor}. */
public static final int ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED = 7000;
/** Caused by a failure when processing a video frame. */
public static final int ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED = 7001;
/**
* Player implementations that want to surface custom errors can use error codes greater than this
@@ -311,10 +311,10 @@ public class PlaybackException extends Exception implements Bundleable {
return "ERROR_CODE_DRM_DEVICE_REVOKED";
case ERROR_CODE_DRM_LICENSE_EXPIRED:
return "ERROR_CODE_DRM_LICENSE_EXPIRED";
case ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED:
return "ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED";
case ERROR_CODE_FRAME_PROCESSING_FAILED:
return "ERROR_CODE_FRAME_PROCESSING_FAILED";
case ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED:
return "ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED";
case ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED:
return "ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED";
default:
if (errorCode >= CUSTOM_ERROR_CODE_BASE) {
return "custom error code";


@@ -21,25 +21,26 @@ import com.google.android.exoplayer2.C;
* Thrown when an exception occurs while preparing an {@link Effect}, or applying an {@link Effect}
* to video frames.
*/
public final class FrameProcessingException extends Exception {
public final class VideoFrameProcessingException extends Exception {
/**
* Wraps the given exception in a {@code FrameProcessingException} if it is not already a {@code
* FrameProcessingException} and returns the exception otherwise.
* Wraps the given exception in a {@code VideoFrameProcessingException} if it is not already a
* {@code VideoFrameProcessingException} and returns the exception otherwise.
*/
public static FrameProcessingException from(Exception exception) {
public static VideoFrameProcessingException from(Exception exception) {
return from(exception, /* presentationTimeUs= */ C.TIME_UNSET);
}
/**
* Wraps the given exception in a {@code FrameProcessingException} with the given timestamp if it
* is not already a {@code FrameProcessingException} and returns the exception otherwise.
* Wraps the given exception in a {@code VideoFrameProcessingException} with the given timestamp
* if it is not already a {@code VideoFrameProcessingException} and returns the exception
* otherwise.
*/
public static FrameProcessingException from(Exception exception, long presentationTimeUs) {
if (exception instanceof FrameProcessingException) {
return (FrameProcessingException) exception;
public static VideoFrameProcessingException from(Exception exception, long presentationTimeUs) {
if (exception instanceof VideoFrameProcessingException) {
return (VideoFrameProcessingException) exception;
} else {
return new FrameProcessingException(exception, presentationTimeUs);
return new VideoFrameProcessingException(exception, presentationTimeUs);
}
}
@@ -54,7 +55,7 @@ public final class FrameProcessingException extends Exception {
*
* @param message The detail message for this exception.
*/
public FrameProcessingException(String message) {
public VideoFrameProcessingException(String message) {
this(message, /* presentationTimeUs= */ C.TIME_UNSET);
}
@@ -64,7 +65,7 @@ public final class FrameProcessingException extends Exception {
* @param message The detail message for this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/
public FrameProcessingException(String message, long presentationTimeUs) {
public VideoFrameProcessingException(String message, long presentationTimeUs) {
super(message);
this.presentationTimeUs = presentationTimeUs;
}
@@ -75,7 +76,7 @@ public final class FrameProcessingException extends Exception {
* @param message The detail message for this exception.
* @param cause The cause of this exception.
*/
public FrameProcessingException(String message, Throwable cause) {
public VideoFrameProcessingException(String message, Throwable cause) {
this(message, cause, /* presentationTimeUs= */ C.TIME_UNSET);
}
@@ -86,7 +87,7 @@ public final class FrameProcessingException extends Exception {
* @param cause The cause of this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/
public FrameProcessingException(String message, Throwable cause, long presentationTimeUs) {
public VideoFrameProcessingException(String message, Throwable cause, long presentationTimeUs) {
super(message, cause);
this.presentationTimeUs = presentationTimeUs;
}
@@ -96,7 +97,7 @@ public final class FrameProcessingException extends Exception {
*
* @param cause The cause of this exception.
*/
public FrameProcessingException(Throwable cause) {
public VideoFrameProcessingException(Throwable cause) {
this(cause, /* presentationTimeUs= */ C.TIME_UNSET);
}
@@ -106,7 +107,7 @@ public final class FrameProcessingException extends Exception {
* @param cause The cause of this exception.
* @param presentationTimeUs The timestamp of the frame for which the exception occurred.
*/
public FrameProcessingException(Throwable cause, long presentationTimeUs) {
public VideoFrameProcessingException(Throwable cause, long presentationTimeUs) {
super(cause);
this.presentationTimeUs = presentationTimeUs;
}
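The from() factory documented above is clearest in a catch block. A hedged usage sketch, where FrameTask is a hypothetical helper interface introduced only for this example:

```java
import com.google.android.exoplayer2.util.VideoFrameProcessingException;

final class WrapExample {
  interface FrameTask {
    void run() throws Exception;
  }

  // Normalizes any failure to a VideoFrameProcessingException carrying the
  // frame's timestamp; an exception that already is one passes through
  // unwrapped rather than being double-wrapped.
  static void runForFrame(FrameTask task, long presentationTimeUs)
      throws VideoFrameProcessingException {
    try {
      task.run();
    } catch (Exception e) {
      throw VideoFrameProcessingException.from(e, presentationTimeUs);
    }
  }
}
```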


@@ -25,7 +25,7 @@ import java.util.List;
import java.util.concurrent.Executor;
/**
* Interface for a frame processor that applies changes to individual video frames.
* Interface for a video frame processor that applies changes to individual video frames.
*
* <p>The changes are specified by {@link Effect} instances passed to {@link Factory#create}.
*
@@ -36,13 +36,13 @@ import java.util.concurrent.Executor;
* <p>The caller must {@linkplain #registerInputFrame() register} input frames before rendering them
* to the input {@link Surface}.
*/
public interface FrameProcessor {
public interface VideoFrameProcessor {
// TODO(b/243036513): Allow effects to be replaced.
/** A factory for {@link FrameProcessor} instances. */
/** A factory for {@link VideoFrameProcessor} instances. */
interface Factory {
/**
* Creates a new {@link FrameProcessor} instance.
* Creates a new {@link VideoFrameProcessor} instance.
*
* @param context A {@link Context}.
* @param effects The {@link Effect} instances to apply to each frame. Applied on the {@code
@@ -54,18 +54,18 @@ public interface FrameProcessor {
* video) or not (e.g. from a {@link Bitmap}). See <a
* href="https://source.android.com/docs/core/graphics/arch-st#ext_texture">the
* SurfaceTexture docs</a> for more information on external textures.
* @param releaseFramesAutomatically If {@code true}, the {@link FrameProcessor} will render
* output frames to the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface}
* automatically as {@link FrameProcessor} is done processing them. If {@code false}, the
* {@link FrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to
* @param releaseFramesAutomatically If {@code true}, the instance will render output frames to
* the {@linkplain #setOutputSurfaceInfo(SurfaceInfo) output surface} automatically as
* {@link VideoFrameProcessor} is done processing them. If {@code false}, the {@link
* VideoFrameProcessor} will block until {@link #releaseOutputFrame(long)} is called, to
* render or drop the frame.
* @param executor The {@link Executor} on which the {@code listener} is invoked.
* @param listener A {@link Listener}.
* @return A new instance.
* @throws FrameProcessingException If a problem occurs while creating the {@link
* FrameProcessor}.
* @throws VideoFrameProcessingException If a problem occurs while creating the {@link
* VideoFrameProcessor}.
*/
FrameProcessor create(
VideoFrameProcessor create(
Context context,
List<Effect> effects,
DebugViewProvider debugViewProvider,
@@ -75,7 +75,7 @@ public interface FrameProcessor {
boolean releaseFramesAutomatically,
Executor executor,
Listener listener)
throws FrameProcessingException;
throws VideoFrameProcessingException;
}
/**
@@ -105,15 +105,15 @@ public interface FrameProcessor {
void onOutputFrameAvailable(long presentationTimeUs);
/**
* Called when an exception occurs during asynchronous frame processing.
* Called when an exception occurs during asynchronous video frame processing.
*
* <p>If an error occurred, consuming and producing further frames will not work as expected and
* the {@link FrameProcessor} should be released.
* the {@link VideoFrameProcessor} should be released.
*/
void onFrameProcessingError(FrameProcessingException exception);
void onError(VideoFrameProcessingException exception);
/** Called after the {@link FrameProcessor} has produced its final output frame. */
void onFrameProcessingEnded();
/** Called after the {@link VideoFrameProcessor} has produced its final output frame. */
void onEnded();
}
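A minimal sketch of a listener under the new names: onFrameProcessingError becomes onError and onFrameProcessingEnded becomes onEnded. The four callbacks here are the ones visible in this diff (onOutputSizeChanged appears in the MediaCodecVideoRenderer hunks further down):

```java
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;

final class ListenerSketch {
  static VideoFrameProcessor.Listener newListener() {
    return new VideoFrameProcessor.Listener() {
      @Override
      public void onOutputSizeChanged(int width, int height) {}

      @Override
      public void onOutputFrameAvailable(long presentationTimeUs) {}

      @Override
      public void onError(VideoFrameProcessingException exception) {
        // Previously onFrameProcessingError; further frames cannot be relied
        // on after an error, so the processor should be released.
        System.err.println("Video frame processing failed: " + exception);
      }

      @Override
      public void onEnded() {
        // Previously onFrameProcessingEnded: the final output frame was produced.
      }
    };
  }
}
```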
/**
@@ -126,14 +126,14 @@ public interface FrameProcessor {
long DROP_OUTPUT_FRAME = -2;
/**
* Provides an input {@link Bitmap} to the {@link FrameProcessor}.
* Provides an input {@link Bitmap} to the {@code VideoFrameProcessor}.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code false}.
*
* <p>Can be called on any thread.
*
* @param inputBitmap The {@link Bitmap} queued to the {@link FrameProcessor}.
* @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
* @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
* second.
@@ -143,9 +143,10 @@ public interface FrameProcessor {
void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate);
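A hedged sketch of the bitmap input path just described, valid only when the processor was created with isInputTextureExternal set to false; the duration and frame rate values are arbitrary examples:

```java
import android.graphics.Bitmap;
import com.google.android.exoplayer2.util.VideoFrameProcessor;

final class BitmapInputSketch {
  // Shows the queued bitmap for three seconds at 30 fps, then signals that no
  // further input will arrive.
  static void queueStill(VideoFrameProcessor videoFrameProcessor, Bitmap bitmap) {
    videoFrameProcessor.queueInputBitmap(
        bitmap, /* durationUs= */ 3_000_000, /* frameRate= */ 30f);
    videoFrameProcessor.signalEndOfInput();
  }
}
```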
/**
* Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from.
* Returns the input {@link Surface}, where {@code VideoFrameProcessor} consumes input frames
* from.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* <p>Can be called on any thread.
@@ -170,11 +171,11 @@ public interface FrameProcessor {
void setInputFrameInfo(FrameInfo inputFrameInfo);
/**
* Informs the {@code FrameProcessor} that a frame will be queued to its input surface.
* Informs the {@code VideoFrameProcessor} that a frame will be queued to its input surface.
*
* <p>Must be called before rendering a frame to the frame processor's input surface.
* <p>Must be called before rendering a frame to the {@code VideoFrameProcessor}'s input surface.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* <p>Can be called on any thread.
@@ -188,7 +189,7 @@ public interface FrameProcessor {
* Returns the number of input frames that have been {@linkplain #registerInputFrame() registered}
* but not processed off the {@linkplain #getInputSurface() input surface} yet.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* <p>Can be called on any thread.
@@ -200,7 +201,7 @@ public interface FrameProcessor {
* dropped, they will be rendered to this output {@link SurfaceInfo}.
*
* <p>The new output {@link SurfaceInfo} is applied from the next output frame rendered onwards.
* If the output {@link SurfaceInfo} is {@code null}, the {@code FrameProcessor} will stop
* If the output {@link SurfaceInfo} is {@code null}, the {@code VideoFrameProcessor} will stop
* rendering pending frames and resume rendering once a non-null {@link SurfaceInfo} is set.
*
* <p>If the dimensions given in {@link SurfaceInfo} do not match the {@linkplain
@@ -234,7 +235,7 @@ public interface FrameProcessor {
void releaseOutputFrame(long releaseTimeNs);
/**
* Informs the {@code FrameProcessor} that no further input frames should be accepted.
* Informs the {@code VideoFrameProcessor} that no further input frames should be accepted.
*
* <p>Can be called on any thread.
*
@@ -243,12 +244,12 @@ public interface FrameProcessor {
void signalEndOfInput();
/**
* Flushes the {@code FrameProcessor}.
* Flushes the {@code VideoFrameProcessor}.
*
* <p>All the frames that are {@linkplain #registerInputFrame() registered} prior to calling this
* method are no longer considered to be registered when this method returns.
*
* <p>This method should only be used for when the {@link FrameProcessor}'s {@code
* <p>This method should only be used for when the {@code VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* <p>{@link Listener} methods invoked prior to calling this method should be ignored.
@@ -258,10 +259,9 @@ public interface FrameProcessor {
/**
* Releases all resources.
*
* <p>If the frame processor is released before it has {@linkplain
* Listener#onFrameProcessingEnded() ended}, it will attempt to cancel processing any input frames
* that have already become available. Input frames that become available after release are
* ignored.
* <p>If the {@code VideoFrameProcessor} is released before it has {@linkplain Listener#onEnded()
* ended}, it will attempt to cancel processing any input frames that have already become
* available. Input frames that become available after release are ignored.
*
* <p>This method blocks until all resources are released or releasing times out.
*
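A condensed sketch of the surface-input contract documented in this interface, assuming a processor created with isInputTextureExternal set to true; how the instance is created is out of scope here (any Factory implementation, such as the DefaultVideoFrameProcessor this commit introduces, would do):

```java
import android.view.Surface;
import com.google.android.exoplayer2.util.VideoFrameProcessor;

final class SurfaceInputSketch {
  // Each frame must be registered before it is rendered to the input surface.
  static Surface prepareNextFrame(VideoFrameProcessor videoFrameProcessor) {
    videoFrameProcessor.registerInputFrame();
    return videoFrameProcessor.getInputSurface();
  }

  // Signals that no further frames will arrive, then releases all resources;
  // release() blocks until resources are released or releasing times out.
  static void finishStream(VideoFrameProcessor videoFrameProcessor) {
    videoFrameProcessor.signalEndOfInput();
    videoFrameProcessor.release();
  }
}
```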


@@ -69,8 +69,6 @@ import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MediaFormatUtil;
import com.google.android.exoplayer2.util.MimeTypes;
@@ -78,6 +76,8 @@ import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.TraceUtil;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.VideoRendererEventListener.EventDispatcher;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
@@ -144,7 +144,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private final Context context;
private final VideoFrameReleaseHelper frameReleaseHelper;
private final EventDispatcher eventDispatcher;
private final FrameProcessorManager frameProcessorManager;
private final VideoFrameProcessorManager videoFrameProcessorManager;
private final long allowedJoiningTimeMs;
private final int maxDroppedFramesToNotify;
private final boolean deviceNeedsNoPostProcessWorkaround;
@@ -348,7 +348,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
this.context = context.getApplicationContext();
frameReleaseHelper = new VideoFrameReleaseHelper(this.context);
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
frameProcessorManager = new FrameProcessorManager(frameReleaseHelper, /* renderer= */ this);
videoFrameProcessorManager =
new VideoFrameProcessorManager(frameReleaseHelper, /* renderer= */ this);
deviceNeedsNoPostProcessWorkaround = deviceNeedsNoPostProcessWorkaround();
joiningDeadlineMs = C.TIME_UNSET;
scalingMode = C.VIDEO_SCALING_MODE_DEFAULT;
@@ -559,8 +560,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override
protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
super.onPositionReset(positionUs, joining);
if (frameProcessorManager.isEnabled()) {
frameProcessorManager.flush();
if (videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.flush();
}
clearRenderedFirstFrame();
frameReleaseHelper.onPositionReset();
@@ -577,8 +578,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override
public boolean isEnded() {
boolean isEnded = super.isEnded();
if (frameProcessorManager.isEnabled()) {
isEnded &= frameProcessorManager.releasedLastFrame();
if (videoFrameProcessorManager.isEnabled()) {
isEnded &= videoFrameProcessorManager.releasedLastFrame();
}
return isEnded;
}
@@ -586,7 +587,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override
public boolean isReady() {
if (super.isReady()
&& (!frameProcessorManager.isEnabled() || frameProcessorManager.isReady())
&& (!videoFrameProcessorManager.isEnabled() || videoFrameProcessorManager.isReady())
&& (renderedFirstFrameAfterReset
|| (placeholderSurface != null && displaySurface == placeholderSurface)
|| getCodec() == null
@@ -646,8 +647,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
try {
super.onReset();
} finally {
if (frameProcessorManager.isEnabled()) {
frameProcessorManager.reset();
if (videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.reset();
}
if (placeholderSurface != null) {
releasePlaceholderSurface();
@@ -687,14 +688,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
case MSG_SET_VIDEO_EFFECTS:
@SuppressWarnings("unchecked")
List<Effect> videoEffects = (List<Effect>) checkNotNull(message);
frameProcessorManager.setVideoEffects(videoEffects);
videoFrameProcessorManager.setVideoEffects(videoEffects);
break;
case MSG_SET_VIDEO_OUTPUT_RESOLUTION:
Size outputResolution = (Size) checkNotNull(message);
if (outputResolution.getWidth() != 0
&& outputResolution.getHeight() != 0
&& displaySurface != null) {
frameProcessorManager.setOutputSurfaceInfo(displaySurface, outputResolution);
videoFrameProcessorManager.setOutputSurfaceInfo(displaySurface, outputResolution);
}
break;
case MSG_SET_AUDIO_ATTRIBUTES:
@@ -733,7 +734,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@State int state = getState();
@Nullable MediaCodecAdapter codec = getCodec();
if (codec != null && !frameProcessorManager.isEnabled()) {
if (codec != null && !videoFrameProcessorManager.isEnabled()) {
if (Util.SDK_INT >= 23 && displaySurface != null && !codecNeedsSetOutputSurfaceWorkaround) {
setOutputSurfaceV23(codec, displaySurface);
} else {
@@ -750,17 +751,17 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// Set joining deadline to report MediaCodecVideoRenderer is ready.
setJoiningDeadlineMs();
}
// When FrameProcessorManager is enabled, set FrameProcessorManager's display surface and an
// unknown size.
if (frameProcessorManager.isEnabled()) {
frameProcessorManager.setOutputSurfaceInfo(displaySurface, Size.UNKNOWN);
// When VideoFrameProcessorManager is enabled, set VideoFrameProcessorManager's display
// surface and an unknown size.
if (videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.setOutputSurfaceInfo(displaySurface, Size.UNKNOWN);
}
} else {
// The display surface has been removed.
clearReportedVideoSize();
clearRenderedFirstFrame();
if (frameProcessorManager.isEnabled()) {
frameProcessorManager.clearOutputSurfaceInfo();
if (videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.clearOutputSurfaceInfo();
}
}
} else if (displaySurface != null && displaySurface != placeholderSurface) {
@@ -813,16 +814,16 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
displaySurface = placeholderSurface;
}
if (frameProcessorManager.isEnabled()) {
mediaFormat = frameProcessorManager.amendMediaFormatKeys(mediaFormat);
if (videoFrameProcessorManager.isEnabled()) {
mediaFormat = videoFrameProcessorManager.amendMediaFormatKeys(mediaFormat);
}
return MediaCodecAdapter.Configuration.createForVideoDecoding(
codecInfo,
mediaFormat,
format,
frameProcessorManager.isEnabled()
? frameProcessorManager.getInputSurface()
videoFrameProcessorManager.isEnabled()
? videoFrameProcessorManager.getInputSurface()
: displaySurface,
crypto);
}
@@ -852,8 +853,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
super.render(positionUs, elapsedRealtimeUs);
if (frameProcessorManager.isEnabled()) {
frameProcessorManager.releaseProcessedFrames(positionUs, elapsedRealtimeUs);
if (videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.releaseProcessedFrames(positionUs, elapsedRealtimeUs);
}
}
@@ -960,8 +961,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@CallSuper
@Override
protected void onReadyToInitializeCodec(Format format) throws ExoPlaybackException {
if (!frameProcessorManager.isEnabled()) {
frameProcessorManager.maybeEnable(format);
if (!videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.maybeEnable(format);
}
}
@@ -978,7 +979,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
if (Util.SDK_INT >= 23 && tunneling) {
tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(checkNotNull(getCodec()));
}
frameProcessorManager.onCodecInitialized(name);
videoFrameProcessorManager.onCodecInitialized(name);
}
@Override
@ -1066,16 +1067,16 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
height = rotatedHeight;
pixelWidthHeightRatio = 1 / pixelWidthHeightRatio;
}
} else if (!frameProcessorManager.isEnabled()) {
// Neither the codec nor the FrameProcessor applies the rotation.
} else if (!videoFrameProcessorManager.isEnabled()) {
// Neither the codec nor the VideoFrameProcessor applies the rotation.
unappliedRotationDegrees = format.rotationDegrees;
}
decodedVideoSize =
new VideoSize(width, height, unappliedRotationDegrees, pixelWidthHeightRatio);
frameReleaseHelper.onFormatChanged(format.frameRate);
if (frameProcessorManager.isEnabled()) {
frameProcessorManager.setInputFormat(
if (videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.setInputFormat(
format
.buildUpon()
.setWidth(width)
@@ -1138,7 +1139,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
if (bufferPresentationTimeUs != lastBufferPresentationTimeUs) {
if (!frameProcessorManager.isEnabled()) {
if (!videoFrameProcessorManager.isEnabled()) {
frameReleaseHelper.onNextFrame(bufferPresentationTimeUs);
} // else, update the frameReleaseHelper when releasing the processed frames.
this.lastBufferPresentationTimeUs = bufferPresentationTimeUs;
@@ -1176,9 +1177,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
boolean forceRenderOutputBuffer = shouldForceRender(positionUs, earlyUs);
if (forceRenderOutputBuffer) {
boolean notifyFrameMetaDataListener;
if (frameProcessorManager.isEnabled()) {
if (videoFrameProcessorManager.isEnabled()) {
notifyFrameMetaDataListener = false;
if (!frameProcessorManager.maybeRegisterFrame(format, presentationTimeUs, isLastBuffer)) {
if (!videoFrameProcessorManager.maybeRegisterFrame(
format, presentationTimeUs, isLastBuffer)) {
return false;
}
} else {
@@ -1200,7 +1202,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// Apply a timestamp adjustment, if there is one.
long adjustedReleaseTimeNs = frameReleaseHelper.adjustReleaseTime(unadjustedFrameReleaseTimeNs);
if (!frameProcessorManager.isEnabled()) {
if (!videoFrameProcessorManager.isEnabled()) {
earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
} // else, use the unadjusted earlyUs in previewing use cases.
@@ -1218,9 +1220,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return true;
}
if (frameProcessorManager.isEnabled()) {
frameProcessorManager.releaseProcessedFrames(positionUs, elapsedRealtimeUs);
if (frameProcessorManager.maybeRegisterFrame(format, presentationTimeUs, isLastBuffer)) {
if (videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.releaseProcessedFrames(positionUs, elapsedRealtimeUs);
if (videoFrameProcessorManager.maybeRegisterFrame(format, presentationTimeUs, isLastBuffer)) {
renderOutputBufferNow(
codec,
format,
@@ -1453,8 +1455,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
droppedSourceBufferCount, /* droppedDecoderBufferCount= */ buffersInCodecCount);
}
flushOrReinitializeCodec();
if (frameProcessorManager.isEnabled()) {
frameProcessorManager.flush();
if (videoFrameProcessorManager.isEnabled()) {
videoFrameProcessorManager.flush();
}
return true;
}
@@ -1509,11 +1511,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
boolean notifyFrameMetadataListener) {
// In previewing mode, use the presentation time as release time so that the SurfaceTexture is
// accompanied by the rendered frame's presentation time. Setting a realtime based release time
// is only relevant when rendering to a SurfaceView (that is when not using FrameProcessor) for
// better frame release. In previewing mode MediaCodec renders to FrameProcessor's input
// surface, which is not a SurfaceView.
// is only relevant when rendering to a SurfaceView (that is when not using VideoFrameProcessor)
// for better frame release. In previewing mode MediaCodec renders to VideoFrameProcessor's
// input surface, which is not a SurfaceView.
long releaseTimeNs =
frameProcessorManager.isEnabled()
videoFrameProcessorManager.isEnabled()
? (presentationTimeUs + getOutputStreamOffsetUs()) * 1000
: System.nanoTime();
if (notifyFrameMetadataListener) {
@@ -1530,9 +1532,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* Renders the output buffer with the specified index. This method is only called if the platform
* API version of the device is less than 21.
*
* <p>When frame processing is {@linkplain FrameProcessorManager#isEnabled()} enabled}, this
* method renders to {@link FrameProcessorManager}'s {@linkplain
* FrameProcessorManager#getInputSurface() input surface}.
* <p>When video frame processing is {@linkplain VideoFrameProcessorManager#isEnabled()} enabled},
* this method renders to {@link VideoFrameProcessorManager}'s {@linkplain
* VideoFrameProcessorManager#getInputSurface() input surface}.
*
* @param codec The codec that owns the output buffer.
* @param index The index of the output buffer to drop.
@@ -1544,7 +1546,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
TraceUtil.endSection();
decoderCounters.renderedOutputBufferCount++;
consecutiveDroppedFrameCount = 0;
if (!frameProcessorManager.isEnabled()) {
if (!videoFrameProcessorManager.isEnabled()) {
lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000;
maybeNotifyVideoSizeChanged(decodedVideoSize);
maybeNotifyRenderedFirstFrame();
@@ -1555,9 +1557,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* Renders the output buffer with the specified index. This method is only called if the platform
* API version of the device is 21 or later.
*
* <p>When frame processing is {@linkplain FrameProcessorManager#isEnabled()} enabled}, this
* method renders to {@link FrameProcessorManager}'s {@linkplain
* FrameProcessorManager#getInputSurface() input surface}.
* <p>When video frame processing is {@linkplain VideoFrameProcessorManager#isEnabled()} enabled},
* this method renders to {@link VideoFrameProcessorManager}'s {@linkplain
* VideoFrameProcessorManager#getInputSurface() input surface}.
*
* @param codec The codec that owns the output buffer.
* @param index The index of the output buffer to drop.
@@ -1572,7 +1574,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
TraceUtil.endSection();
decoderCounters.renderedOutputBufferCount++;
consecutiveDroppedFrameCount = 0;
if (!frameProcessorManager.isEnabled()) {
if (!videoFrameProcessorManager.isEnabled()) {
lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000;
maybeNotifyVideoSizeChanged(decodedVideoSize);
maybeNotifyRenderedFirstFrame();
@@ -1830,8 +1832,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return new MediaCodecVideoDecoderException(cause, codecInfo, displaySurface);
}
/** Manages {@link FrameProcessor} interactions. */
private static final class FrameProcessorManager {
/** Manages {@link VideoFrameProcessor} interactions. */
private static final class VideoFrameProcessorManager {
/** The threshold for releasing a processed frame. */
private static final long EARLY_THRESHOLD_US = 50_000;
@@ -1844,7 +1846,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private final ArrayDeque<Pair<Long, Format>> pendingFrameFormats;
private @MonotonicNonNull Handler handler;
@Nullable private FrameProcessor frameProcessor;
@Nullable private VideoFrameProcessor videoFrameProcessor;
@Nullable private CopyOnWriteArrayList<Effect> videoEffects;
@Nullable private Format inputFormat;
/**
@@ -1854,16 +1856,18 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
@Nullable private Pair<Surface, Size> currentSurfaceAndSize;
private int frameProcessorMaxPendingFrameCount;
private int videoFrameProcessorMaxPendingFrameCount;
private boolean canEnableFrameProcessing;
/**
* Whether the last frame of the current stream is decoded and registered to {@link
* FrameProcessor}.
* VideoFrameProcessor}.
*/
private boolean registeredLastFrame;
/** Whether the last frame of the current stream is processed by the {@link FrameProcessor}. */
/**
* Whether the last frame of the current stream is processed by the {@link VideoFrameProcessor}.
*/
private boolean processedLastFrame;
/** Whether the last frame of the current stream is released to the output {@link Surface}. */
@@ -1876,14 +1880,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
private long pendingOutputSizeChangeNotificationTimeUs;
/** Creates a new instance. */
public FrameProcessorManager(
public VideoFrameProcessorManager(
VideoFrameReleaseHelper frameReleaseHelper,
@UnderInitialization MediaCodecVideoRenderer renderer) {
this.frameReleaseHelper = frameReleaseHelper;
this.renderer = renderer;
processedFramesTimestampsUs = new ArrayDeque<>();
pendingFrameFormats = new ArrayDeque<>();
frameProcessorMaxPendingFrameCount = C.LENGTH_UNSET;
videoFrameProcessorMaxPendingFrameCount = C.LENGTH_UNSET;
canEnableFrameProcessing = true;
lastCodecBufferPresentationTimestampUs = C.TIME_UNSET;
processedFrameSize = VideoSize.UNKNOWN;
@@ -1900,30 +1904,32 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
this.videoEffects.addAll(videoEffects);
}
/** Returns whether frame processing is enabled. */
/** Returns whether video frame processing is enabled. */
public boolean isEnabled() {
return frameProcessor != null;
return videoFrameProcessor != null;
}
/** Returns whether {@code FrameProcessorManager} is ready to accept input frames. */
/** Returns whether {@code VideoFrameProcessorManager} is ready to accept input frames. */
public boolean isReady() {
return currentSurfaceAndSize == null || !currentSurfaceAndSize.second.equals(Size.UNKNOWN);
}
/** Whether the {@link FrameProcessor} has released the last frame in the current stream. */
/**
* Whether the {@link VideoFrameProcessor} has released the last frame in the current stream.
*/
public boolean releasedLastFrame() {
return releasedLastFrame;
}
/**
* Flushes the {@link FrameProcessor}.
* Flushes the {@link VideoFrameProcessor}.
*
* <p>Caller must ensure frame processing {@linkplain #isEnabled() is enabled} before calling
* this method.
* <p>Caller must ensure video frame processing {@linkplain #isEnabled() is enabled} before
* calling this method.
*/
public void flush() {
checkStateNotNull(frameProcessor);
frameProcessor.flush();
checkStateNotNull(videoFrameProcessor);
videoFrameProcessor.flush();
processedFramesTimestampsUs.clear();
handler.removeCallbacksAndMessages(/* token= */ null);
@@ -1935,14 +1941,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
/**
* Tries to enable frame processing.
* Tries to enable video frame processing.
*
* <p>Caller must ensure frame processing {@linkplain #isEnabled() is not enabled} before
* <p>Caller must ensure video frame processing {@linkplain #isEnabled() is not enabled} before
* calling this method.
*
* @param inputFormat The {@link Format} that is input into the {@link FrameProcessor}.
* @return Whether frame processing is enabled.
* @throws ExoPlaybackException When enabling the {@link FrameProcessor} failed.
* @param inputFormat The {@link Format} that is input into the {@link VideoFrameProcessor}.
* @return Whether video frame processing is enabled.
* @throws ExoPlaybackException When enabling the {@link VideoFrameProcessor} failed.
*/
@CanIgnoreReturnValue
public boolean maybeEnable(Format inputFormat) throws ExoPlaybackException {
@@ -1977,11 +1983,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
// Insert as the first effect as if the decoder has applied the rotation.
videoEffects.add(
/* index= */ 0,
FrameProcessorAccessor.createRotationEffect(inputFormat.rotationDegrees));
VideoFrameProcessorAccessor.createRotationEffect(inputFormat.rotationDegrees));
}
frameProcessor =
FrameProcessorAccessor.getFrameProcessorFactory()
videoFrameProcessor =
VideoFrameProcessorAccessor.getFrameProcessorFactory()
.create(
renderer.context,
checkNotNull(videoEffects),
@@ -1991,19 +1997,20 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/* isInputTextureExternal= */ true,
/* releaseFramesAutomatically= */ false,
/* executor= */ handler::post,
new FrameProcessor.Listener() {
new VideoFrameProcessor.Listener() {
@Override
public void onOutputSizeChanged(int width, int height) {
@Nullable Format inputFormat = FrameProcessorManager.this.inputFormat;
@Nullable Format inputFormat = VideoFrameProcessorManager.this.inputFormat;
checkStateNotNull(inputFormat);
// TODO(b/264889146): Handle Effect that changes output size based on pts.
processedFrameSize =
new VideoSize(
width,
height,
// FrameProcessor is configured to produce rotation free frames.
// VideoFrameProcessor is configured to produce rotation free
// frames.
/* unappliedRotationDegrees= */ 0,
// FrameProcessor always outputs pixelWidthHeightRatio 1.
// VideoFrameProcessor always outputs pixelWidthHeightRatio 1.
/* pixelWidthHeightRatio= */ 1.f);
pendingOutputSizeChange = true;
}
@@ -2027,27 +2034,27 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
@Override
public void onFrameProcessingError(FrameProcessingException exception) {
public void onError(VideoFrameProcessingException exception) {
renderer.setPendingPlaybackException(
renderer.createRendererException(
exception,
inputFormat,
PlaybackException.ERROR_CODE_FRAME_PROCESSING_FAILED));
PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED));
}
@Override
public void onFrameProcessingEnded() {
public void onEnded() {
throw new IllegalStateException();
}
});
} catch (Exception e) {
throw renderer.createRendererException(
e, inputFormat, PlaybackException.ERROR_CODE_FRAME_PROCESSOR_INIT_FAILED);
e, inputFormat, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
}
if (currentSurfaceAndSize != null) {
Size outputSurfaceSize = currentSurfaceAndSize.second;
frameProcessor.setOutputSurfaceInfo(
videoFrameProcessor.setOutputSurfaceInfo(
new SurfaceInfo(
currentSurfaceAndSize.first,
outputSurfaceSize.getWidth(),
@@ -2059,20 +2066,20 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
/**
* Returns the {@linkplain FrameProcessor#getInputSurface input surface} of the {@link
* FrameProcessor}.
* Returns the {@linkplain VideoFrameProcessor#getInputSurface input surface} of the {@link
* VideoFrameProcessor}.
*
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling
* this method.
* <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* calling this method.
*/
public Surface getInputSurface() {
return checkNotNull(frameProcessor).getInputSurface();
return checkNotNull(videoFrameProcessor).getInputSurface();
}
/**
* Sets the output surface info.
*
* @param outputSurface The {@link Surface} to which {@link FrameProcessor} outputs.
* @param outputSurface The {@link Surface} to which {@link VideoFrameProcessor} outputs.
* @param outputResolution The {@link Size} of the output resolution.
*/
public void setOutputSurfaceInfo(Surface outputSurface, Size outputResolution) {
@@ -2083,7 +2090,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
currentSurfaceAndSize = Pair.create(outputSurface, outputResolution);
if (isEnabled()) {
checkNotNull(frameProcessor)
checkNotNull(videoFrameProcessor)
.setOutputSurfaceInfo(
new SurfaceInfo(
outputSurface, outputResolution.getWidth(), outputResolution.getHeight()));
@@ -2093,22 +2100,22 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/**
* Clears the set output surface info.
*
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling
* this method.
* <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* calling this method.
*/
public void clearOutputSurfaceInfo() {
checkNotNull(frameProcessor).setOutputSurfaceInfo(null);
checkNotNull(videoFrameProcessor).setOutputSurfaceInfo(null);
currentSurfaceAndSize = null;
}
/**
* Sets the input surface info.
*
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling
* this method.
* <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* calling this method.
*/
public void setInputFormat(Format inputFormat) {
checkNotNull(frameProcessor)
checkNotNull(videoFrameProcessor)
.setInputFrameInfo(
new FrameInfo.Builder(inputFormat.width, inputFormat.height)
.setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
@@ -2123,7 +2130,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
}
/** Sets the necessary {@link MediaFormat} keys for frame processing. */
/** Sets the necessary {@link MediaFormat} keys for video frame processing. */
@SuppressWarnings("InlinedApi")
public MediaFormat amendMediaFormatKeys(MediaFormat mediaFormat) {
if (Util.SDK_INT >= 29
@@ -2136,31 +2143,32 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/**
* Must be called when the codec is initialized.
*
* <p>Sets the {@code frameProcessorMaxPendingFrameCount} based on the {@code codecName}.
* <p>Sets the {@code videoFrameProcessorMaxPendingFrameCount} based on the {@code codecName}.
*/
public void onCodecInitialized(String codecName) {
frameProcessorMaxPendingFrameCount =
videoFrameProcessorMaxPendingFrameCount =
Util.getMaxPendingFramesCountForMediaCodecEncoders(
renderer.context, codecName, /* requestedHdrToneMapping= */ false);
}
/**
* Tries to {@linkplain FrameProcessor#registerInputFrame register an input frame}.
* Tries to {@linkplain VideoFrameProcessor#registerInputFrame register an input frame}.
*
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling
* this method.
* <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* calling this method.
*
* @param format The {@link Format} associated with the frame.
* @param isLastBuffer Whether the buffer is the last from the decoder to register.
* @return Whether {@link MediaCodec} should render the frame to {@link FrameProcessor}.
* @return Whether {@link MediaCodec} should render the frame to {@link VideoFrameProcessor}.
*/
public boolean maybeRegisterFrame(
Format format, long presentationTimestampUs, boolean isLastBuffer) {
checkStateNotNull(frameProcessor);
checkState(frameProcessorMaxPendingFrameCount != C.LENGTH_UNSET);
checkStateNotNull(videoFrameProcessor);
checkState(videoFrameProcessorMaxPendingFrameCount != C.LENGTH_UNSET);
checkState(!registeredLastFrame);
if (frameProcessor.getPendingInputFrameCount() < frameProcessorMaxPendingFrameCount) {
frameProcessor.registerInputFrame();
if (videoFrameProcessor.getPendingInputFrameCount()
< videoFrameProcessorMaxPendingFrameCount) {
videoFrameProcessor.registerInputFrame();
if (currentFrameFormat == null) {
currentFrameFormat = Pair.create(presentationTimestampUs, format);
@@ -2181,11 +2189,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/**
* Releases the processed frames to the {@linkplain #setOutputSurfaceInfo output surface}.
*
* <p>Caller must ensure the {@code FrameProcessorManager} {@link #isEnabled()} before calling
* this method.
* <p>Caller must ensure the {@code VideoFrameProcessorManager} {@link #isEnabled()} before
* calling this method.
*/
public void releaseProcessedFrames(long positionUs, long elapsedRealtimeUs) {
checkStateNotNull(frameProcessor);
checkStateNotNull(videoFrameProcessor);
while (!processedFramesTimestampsUs.isEmpty()) {
boolean isStarted = renderer.getState() == STATE_STARTED;
long bufferPresentationTimeUs = checkNotNull(processedFramesTimestampsUs.peek());
@@ -2201,7 +2209,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
boolean shouldReleaseFrameImmediately = renderer.shouldForceRender(positionUs, earlyUs);
if (shouldReleaseFrameImmediately) {
releaseProcessedFrameInternal(
FrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY, isLastFrame);
VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY, isLastFrame);
break;
} else if (!isStarted || positionUs == renderer.initialPositionUs) {
return;
@@ -2220,9 +2228,9 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
earlyUs = (adjustedFrameReleaseTimeNs - System.nanoTime()) / 1000;
// TODO(b/238302341) Handle very late buffers and drop to key frame. Need to flush
// FrameProcessor input frames in this case.
// VideoFrameProcessor input frames in this case.
if (renderer.shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs, isLastFrame)) {
releaseProcessedFrameInternal(FrameProcessor.DROP_OUTPUT_FRAME, isLastFrame);
releaseProcessedFrameInternal(VideoFrameProcessor.DROP_OUTPUT_FRAME, isLastFrame);
continue;
}
@@ -2245,12 +2253,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/**
* Releases the resources.
*
* <p>Caller must ensure frame processing {@linkplain #isEnabled() is not enabled} before
* <p>Caller must ensure video frame processing {@linkplain #isEnabled() is not enabled} before
* calling this method.
*/
public void reset() {
checkNotNull(frameProcessor).release();
frameProcessor = null;
checkNotNull(videoFrameProcessor).release();
videoFrameProcessor = null;
if (handler != null) {
handler.removeCallbacksAndMessages(/* token= */ null);
}
@@ -2262,11 +2270,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
private void releaseProcessedFrameInternal(long releaseTimeNs, boolean isLastFrame) {
checkStateNotNull(frameProcessor);
frameProcessor.releaseOutputFrame(releaseTimeNs);
checkStateNotNull(videoFrameProcessor);
videoFrameProcessor.releaseOutputFrame(releaseTimeNs);
processedFramesTimestampsUs.remove();
renderer.lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000;
if (releaseTimeNs != FrameProcessor.DROP_OUTPUT_FRAME) {
if (releaseTimeNs != VideoFrameProcessor.DROP_OUTPUT_FRAME) {
renderer.maybeNotifyRenderedFirstFrame();
}
if (isLastFrame) {
@@ -2274,12 +2282,12 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
}
}
private static final class FrameProcessorAccessor {
private static final class VideoFrameProcessorAccessor {
private static @MonotonicNonNull Constructor<?> scaleToFitTransformationBuilderConstructor;
private static @MonotonicNonNull Method setRotationMethod;
private static @MonotonicNonNull Method buildScaleToFitTransformationMethod;
private static @MonotonicNonNull Constructor<?> frameProcessorFactorConstructor;
private static @MonotonicNonNull Constructor<?> videoFrameProcessorFactoryConstructor;
public static Effect createRotationEffect(float rotationDegrees) throws Exception {
prepare();
@@ -2288,16 +2296,16 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
return (Effect) checkNotNull(buildScaleToFitTransformationMethod.invoke(builder));
}
public static FrameProcessor.Factory getFrameProcessorFactory() throws Exception {
public static VideoFrameProcessor.Factory getFrameProcessorFactory() throws Exception {
prepare();
return (FrameProcessor.Factory) frameProcessorFactorConstructor.newInstance();
return (VideoFrameProcessor.Factory) videoFrameProcessorFactoryConstructor.newInstance();
}
@EnsuresNonNull({
"ScaleToFitEffectBuilder",
"SetRotationMethod",
"SetRotationMethod",
"FrameProcessorFactoryClass"
"VideoFrameProcessorFactoryClass"
})
private static void prepare() throws Exception {
if (scaleToFitTransformationBuilderConstructor == null
@@ -2313,9 +2321,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
buildScaleToFitTransformationMethod =
scaleToFitTransformationBuilderClass.getMethod("build");
}
if (frameProcessorFactorConstructor == null) {
frameProcessorFactorConstructor =
Class.forName("com.google.android.exoplayer2.effect.GlEffectsFrameProcessor$Factory")
if (videoFrameProcessorFactoryConstructor == null) {
videoFrameProcessorFactoryConstructor =
Class.forName(
"com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor$Factory")
.getConstructor();
}
}
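VideoFrameProcessorManager#releaseProcessedFrames above decides, per processed frame, between rendering immediately, dropping, and scheduling a release time. A condensed sketch of that three-way decision, with the branching conditions abstracted into boolean parameters:

```java
import com.google.android.exoplayer2.util.VideoFrameProcessor;

final class ReleaseDecisionSketch {
  // All three outcomes go through releaseOutputFrame(long), using the special
  // constants referenced in the hunks above for the first two cases.
  static void releaseFrame(
      VideoFrameProcessor videoFrameProcessor,
      boolean shouldForceRender,
      boolean shouldDrop,
      long adjustedReleaseTimeNs) {
    if (shouldForceRender) {
      videoFrameProcessor.releaseOutputFrame(VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY);
    } else if (shouldDrop) {
      videoFrameProcessor.releaseOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
    } else {
      videoFrameProcessor.releaseOutputFrame(adjustedReleaseTimeNs);
    }
  }
}
```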


@@ -35,9 +35,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Before;
@@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public class ContrastPixelTest {
@@ -89,7 +89,7 @@ public class ContrastPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (contrastShaderProgram != null) {
contrastShaderProgram.release();
}
@@ -198,7 +198,7 @@ public class ContrastPixelTest {
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
private void setupOutputTexture(int outputWidth, int outputHeight) throws GlUtil.GlException {
private void setupOutputTexture(int outputWidth, int outputHeight) throws Exception {
int outputTexId =
GlUtil.createTexture(
outputWidth, outputHeight, /* useHighPrecisionColorComponents= */ false);


@@ -32,9 +32,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
@@ -48,7 +48,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class CropPixelTest {
@@ -82,7 +82,7 @@ public final class CropPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (cropShaderProgram != null) {
cropShaderProgram.release();
}


@@ -28,10 +28,10 @@ import android.graphics.Bitmap;
import android.graphics.Matrix;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.testutil.FrameProcessorTestRunner;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
@@ -39,10 +39,10 @@ import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Pixel test for frame processing via {@link GlEffectsFrameProcessor}.
* Pixel test for video frame processing via {@link DefaultVideoFrameProcessor}.
*
* <p>Uses a {@link GlEffectsFrameProcessor} to process one frame, and checks that the actual output
* matches expected output, either from a golden file or from another edit.
* <p>Uses a {@link DefaultVideoFrameProcessor} to process one frame, and checks that the actual
* output matches expected output, either from a golden file or from another edit.
*
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* bitmaps.
*/
@RunWith(AndroidJUnit4.class)
public final class GlEffectsFrameProcessorPixelTest {
public final class DefaultVideoFrameProcessorPixelTest {
public static final String ORIGINAL_PNG_ASSET_PATH =
"media/bitmap/sample_mp4_first_frame/electrical_colors/original.png";
public static final String WRAPPED_CROP_PNG_ASSET_PATH =
@ -81,20 +81,20 @@ public final class GlEffectsFrameProcessorPixelTest {
/** Input video of which we only use the first frame. */
private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4";
private @MonotonicNonNull FrameProcessorTestRunner frameProcessorTestRunner;
private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
@After
public void release() {
checkNotNull(frameProcessorTestRunner).release();
checkNotNull(videoFrameProcessorTestRunner).release();
}
@Test
public void noEffects_matchesGoldenFile() throws Exception {
String testId = "noEffects_matchesGoldenFile";
frameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -105,11 +105,11 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void noEffects_withImageInput_matchesGoldenFile() throws Exception {
String testId = "noEffects_withImageInput_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId).setIsInputTextureExternal(false).build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processImageFrameAndEnd(expectedBitmap);
Bitmap actualBitmap = videoFrameProcessorTestRunner.processImageFrameAndEnd(expectedBitmap);
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -120,7 +120,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void wrappedCrop_withImageInput_matchesGoldenFile() throws Exception {
String testId = "wrappedCrop_withImageInput_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setIsInputTextureExternal(false)
.setEffects(
@ -134,7 +134,7 @@ public final class GlEffectsFrameProcessorPixelTest {
Bitmap originalBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap expectedBitmap = readBitmap(WRAPPED_CROP_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processImageFrameAndEnd(originalBitmap);
Bitmap actualBitmap = videoFrameProcessorTestRunner.processImageFrameAndEnd(originalBitmap);
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -148,13 +148,13 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void noEffects_withFrameCache_matchesGoldenFile() throws Exception {
String testId = "noEffects_withFrameCache_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(new FrameCache(/* capacity= */ 5))
.build();
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -165,11 +165,11 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void setPixelWidthHeightRatio_matchesGoldenFile() throws Exception {
String testId = "setPixelWidthHeightRatio_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId).setPixelWidthHeightRatio(2f).build();
Bitmap expectedBitmap = readBitmap(SCALE_WIDE_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -182,13 +182,13 @@ public final class GlEffectsFrameProcessorPixelTest {
String testId = "matrixTransformation_matchesGoldenFile";
Matrix translateRightMatrix = new Matrix();
translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects((MatrixTransformation) (long presentationTimeNs) -> translateRightMatrix)
.build();
Bitmap expectedBitmap = readBitmap(TRANSLATE_RIGHT_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -201,7 +201,7 @@ public final class GlEffectsFrameProcessorPixelTest {
String testId = "matrixAndScaleToFitTransformation_matchesGoldenFile";
Matrix translateRightMatrix = new Matrix();
translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(
(MatrixTransformation) (long presentationTimeUs) -> translateRightMatrix,
@ -209,7 +209,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(TRANSLATE_THEN_ROTATE_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -222,13 +222,13 @@ public final class GlEffectsFrameProcessorPixelTest {
String testId = "bitmapOverlay_matchesGoldenFile";
Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH);
BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap);
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(new OverlayEffect(ImmutableList.of(bitmapOverlay)))
.build();
Bitmap expectedBitmap = readBitmap(BITMAP_OVERLAY_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -241,7 +241,7 @@ public final class GlEffectsFrameProcessorPixelTest {
String testId = "scaleToFitAndMatrixTransformation_matchesGoldenFile";
Matrix translateRightMatrix = new Matrix();
translateRightMatrix.postTranslate(/* dx= */ 1, /* dy= */ 0);
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(
new ScaleToFitTransformation.Builder().setRotationDegrees(45).build(),
@ -249,7 +249,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(ROTATE_THEN_TRANSLATE_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -260,13 +260,13 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void presentation_createForHeight_matchesGoldenFile() throws Exception {
String testId = "presentation_createForHeight_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(Presentation.createForHeight(480))
.build();
Bitmap expectedBitmap = readBitmap(REQUEST_OUTPUT_HEIGHT_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -277,7 +277,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void cropThenPresentation_matchesGoldenFile() throws Exception {
String testId = "cropThenPresentation_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(
new Crop(
@ -287,7 +287,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(CROP_THEN_ASPECT_RATIO_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -298,13 +298,13 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void scaleToFitTransformation_rotate45_matchesGoldenFile() throws Exception {
String testId = "scaleToFitTransformation_rotate45_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(new ScaleToFitTransformation.Builder().setRotationDegrees(45).build())
.build();
Bitmap expectedBitmap = readBitmap(ROTATE45_SCALE_TO_FIT_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -315,7 +315,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void twoWrappedScaleToFitTransformations_matchesGoldenFile() throws Exception {
String testId = "twoWrappedScaleToFitTransformations_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(
new GlEffectWrapper(
@ -327,7 +327,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(ROTATE_THEN_SCALE_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -346,20 +346,20 @@ public final class GlEffectsFrameProcessorPixelTest {
}
full10StepRotationAndCenterCrop.add(centerCrop);
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("centerCrop")
.setEffects(centerCrop)
.build();
Bitmap centerCropResultBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
frameProcessorTestRunner.release();
frameProcessorTestRunner =
Bitmap centerCropResultBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
videoFrameProcessorTestRunner.release();
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("full10StepRotationAndCenterCrop")
.setEffects(full10StepRotationAndCenterCrop.build())
.build();
Bitmap fullRotationAndCenterCropResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd();
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -371,11 +371,11 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void increaseBrightness_matchesGoldenFile() throws Exception {
String testId = "increaseBrightness_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId).setEffects(new Brightness(0.5f)).build();
Bitmap expectedBitmap = readBitmap(INCREASE_BRIGHTNESS_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -399,7 +399,7 @@ public final class GlEffectsFrameProcessorPixelTest {
new RgbAdjustment.Builder().setBlueScale(5).build(),
new Rotation(/* degrees= */ 90),
centerCrop);
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("centerCrop")
.setEffects(
@ -407,16 +407,16 @@ public final class GlEffectsFrameProcessorPixelTest {
centerCrop)
.build();
Bitmap centerCropAndBrightnessIncreaseResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd();
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
frameProcessorTestRunner.release();
frameProcessorTestRunner =
videoFrameProcessorTestRunner.release();
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("full4StepRotationBrightnessIncreaseAndCenterCrop")
.setEffects(increaseBrightnessFullRotationCenterCrop)
.build();
Bitmap fullRotationBrightnessIncreaseAndCenterCropResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd();
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -446,7 +446,7 @@ public final class GlEffectsFrameProcessorPixelTest {
new Rotation(/* degrees= */ 90),
new FrameCache(/* capacity= */ 2),
centerCrop);
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("centerCrop")
.setEffects(
@ -454,16 +454,16 @@ public final class GlEffectsFrameProcessorPixelTest {
centerCrop)
.build();
Bitmap centerCropAndBrightnessIncreaseResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd();
frameProcessorTestRunner.release();
frameProcessorTestRunner =
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
videoFrameProcessorTestRunner.release();
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setOutputFileLabel("full4StepRotationBrightnessIncreaseAndCenterCrop")
.setEffects(increaseBrightnessFullRotationCenterCrop)
.build();
Bitmap fullRotationBrightnessIncreaseAndCenterCropResultBitmap =
frameProcessorTestRunner.processFirstFrameAndEnd();
videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -477,7 +477,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Test
public void grayscaleThenIncreaseRedChannel_matchesGoldenFile() throws Exception {
String testId = "grayscaleThenIncreaseRedChannel_matchesGoldenFile";
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setEffects(
RgbFilter.createGrayscaleFilter(),
@ -485,7 +485,7 @@ public final class GlEffectsFrameProcessorPixelTest {
.build();
Bitmap expectedBitmap = readBitmap(GRAYSCALE_THEN_INCREASE_RED_CHANNEL_PNG_ASSET_PATH);
Bitmap actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
@ -496,11 +496,11 @@ public final class GlEffectsFrameProcessorPixelTest {
// TODO(b/227624622): Add a test for HDR input after BitmapPixelTestUtil can read HDR bitmaps,
// using GlEffectWrapper to ensure usage of intermediate textures.
private FrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) {
return new FrameProcessorTestRunner.Builder()
return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setFrameProcessorFactory(new GlEffectsFrameProcessor.Factory())
.setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory())
.setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING);
}
@ -538,10 +538,10 @@ public final class GlEffectsFrameProcessorPixelTest {
}
/**
* Wraps a {@link GlEffect} to prevent the {@link GlEffectsFrameProcessor} from detecting its
* Wraps a {@link GlEffect} to prevent the {@link DefaultVideoFrameProcessor} from detecting its
* class and optimizing it.
*
* <p>This ensures that {@link GlEffectsFrameProcessor} uses a separate {@link GlShaderProgram}
* <p>This ensures that {@link DefaultVideoFrameProcessor} uses a separate {@link GlShaderProgram}
* for the wrapped {@link GlEffect} rather than merging it with preceding or subsequent {@link
* GlEffect} instances and applying them in one combined {@link GlShaderProgram}.
*/
@ -555,7 +555,7 @@ public final class GlEffectsFrameProcessorPixelTest {
@Override
public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return effect.toGlShaderProgram(context, useHdr);
}
}
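
Taken together, the renamed pieces above compose as follows. A minimal sketch of a pixel test in this file's style, reusing the class's existing fields and imports; the test name and effect choice are illustrative only, and the golden-file comparison step is abbreviated:

@Test
public void grayscale_matchesGoldenFile_sketch() throws Exception {
  String testId = "grayscale_matchesGoldenFile_sketch"; // Hypothetical test, for illustration.
  videoFrameProcessorTestRunner =
      new VideoFrameProcessorTestRunner.Builder()
          .setTestId(testId)
          .setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory())
          .setVideoAssetPath(INPUT_SDR_MP4_ASSET_STRING)
          .setEffects(RgbFilter.createGrayscaleFilter())
          .build();
  // Process the first video frame through DefaultVideoFrameProcessor and read it back.
  Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
  // Compare actualBitmap against a golden PNG, as in the tests above.
}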

View File

@ -27,11 +27,11 @@ import androidx.annotation.Nullable;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Longs;
@ -50,9 +50,9 @@ import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Tests for frame release in {@link GlEffectsFrameProcessor}. */
/** Tests for frame release in {@link DefaultVideoFrameProcessor}. */
@RunWith(AndroidJUnit4.class)
public final class GlEffectsFrameProcessorFrameReleaseTest {
public final class DefaultVideoFrameProcessorVideoFrameReleaseTest {
private static final int WIDTH = 200;
private static final int HEIGHT = 100;
@ -68,12 +68,12 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
private final LinkedBlockingQueue<Long> outputReleaseTimesNs = new LinkedBlockingQueue<>();
private @MonotonicNonNull GlEffectsFrameProcessor glEffectsFrameProcessor;
private @MonotonicNonNull DefaultVideoFrameProcessor defaultVideoFrameProcessor;
@After
public void release() {
if (glEffectsFrameProcessor != null) {
glEffectsFrameProcessor.release();
if (defaultVideoFrameProcessor != null) {
defaultVideoFrameProcessor.release();
}
}
@ -136,7 +136,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimeUs.set(presentationTimeUs);
checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimesNs);
checkNotNull(defaultVideoFrameProcessor).releaseOutputFrame(releaseTimesNs);
},
/* releaseFramesAutomatically= */ false);
@ -149,18 +149,18 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
public void controlledFrameRelease_withOneFrameRequestImmediateRelease_releasesFrame()
throws Exception {
long originalPresentationTimeUs = 1234;
long releaseTimesNs = FrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY;
long releaseTimesNs = VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY;
AtomicLong actualPresentationTimeUs = new AtomicLong();
processFramesToEndOfStream(
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimeUs.set(presentationTimeUs);
checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimesNs);
checkNotNull(defaultVideoFrameProcessor).releaseOutputFrame(releaseTimesNs);
},
/* releaseFramesAutomatically= */ false);
assertThat(actualPresentationTimeUs.get()).isEqualTo(originalPresentationTimeUs);
// The actual release time is determined by the FrameProcessor when releasing the frame.
// The actual release time is determined by the VideoFrameProcessor when releasing the frame.
ImmutableList<Long> actualReleaseTimesNs =
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualReleaseTimesNs).hasSize(1);
@ -175,14 +175,15 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimeUs.set(presentationTimeUs);
checkNotNull(glEffectsFrameProcessor).releaseOutputFrame(releaseTimeBeforeCurrentTimeNs);
checkNotNull(defaultVideoFrameProcessor)
.releaseOutputFrame(releaseTimeBeforeCurrentTimeNs);
},
/* releaseFramesAutomatically= */ false);
ImmutableList<Long> actualReleaseTimesNs =
waitForFrameReleaseAndGetReleaseTimesNs(/* expectedFrameCount= */ 1);
assertThat(actualReleaseTimesNs).hasSize(1);
// The actual release time is determined by the FrameProcessor when releasing the frame.
// The actual release time is determined by the VideoFrameProcessor when releasing the frame.
assertThat(actualReleaseTimesNs.get(0)).isAtLeast(releaseTimeBeforeCurrentTimeNs);
}
@ -194,8 +195,8 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* inputPresentationTimesUs= */ new long[] {originalPresentationTimeUs},
/* onFrameAvailableListener= */ presentationTimeNs -> {
actualPresentationTimeUs.set(presentationTimeNs);
checkNotNull(glEffectsFrameProcessor)
.releaseOutputFrame(FrameProcessor.DROP_OUTPUT_FRAME);
checkNotNull(defaultVideoFrameProcessor)
.releaseOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
},
/* releaseFramesAutomatically= */ false);
@ -214,7 +215,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* inputPresentationTimesUs= */ originalPresentationTimesUs,
/* onFrameAvailableListener= */ presentationTimeUs -> {
actualPresentationTimesUs.add(presentationTimeUs);
checkNotNull(glEffectsFrameProcessor)
checkNotNull(defaultVideoFrameProcessor)
.releaseOutputFrame(releaseTimesNs[frameIndex.getAndIncrement()]);
try {
// TODO(b/264252759): Investigate output frames being dropped and remove sleep.
@ -254,11 +255,11 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
// TODO(b/264252759): Investigate output frames being dropped and remove sleep.
// Frames can be dropped silently between EGL and the ImageReader. Sleep after each call
// to swap buffers, to avoid this behavior.
glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[0]);
defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[0]);
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[1]);
defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[1]);
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
glEffectsFrameProcessor.releaseOutputFrame(releaseTimesNs[2]);
defaultVideoFrameProcessor.releaseOutputFrame(releaseTimesNs[2]);
Thread.sleep(PER_FRAME_RELEASE_WAIT_TIME_MS);
assertThat(actualPresentationTimesUs)
@ -276,19 +277,19 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
void onFrameAvailable(long presentationTimeUs);
}
@EnsuresNonNull("glEffectsFrameProcessor")
@EnsuresNonNull("defaultVideoFrameProcessor")
private void processFramesToEndOfStream(
long[] inputPresentationTimesUs,
OnFrameAvailableListener onFrameAvailableListener,
boolean releaseFramesAutomatically)
throws Exception {
AtomicReference<@NullableType FrameProcessingException> frameProcessingExceptionReference =
new AtomicReference<>();
AtomicReference<@NullableType VideoFrameProcessingException>
videoFrameProcessingExceptionReference = new AtomicReference<>();
BlankFrameProducer blankFrameProducer = new BlankFrameProducer();
CountDownLatch frameProcessingEndedCountDownLatch = new CountDownLatch(1);
glEffectsFrameProcessor =
CountDownLatch videoFrameProcessingEndedCountDownLatch = new CountDownLatch(1);
defaultVideoFrameProcessor =
checkNotNull(
new GlEffectsFrameProcessor.Factory()
new DefaultVideoFrameProcessor.Factory()
.create(
getApplicationContext(),
ImmutableList.of((GlEffect) (context, useHdr) -> blankFrameProducer),
@ -298,7 +299,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
/* isInputTextureExternal= */ true,
releaseFramesAutomatically,
MoreExecutors.directExecutor(),
new FrameProcessor.Listener() {
new VideoFrameProcessor.Listener() {
@Override
public void onOutputSizeChanged(int width, int height) {
ImageReader outputImageReader =
@ -307,7 +308,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
height,
PixelFormat.RGBA_8888,
/* maxImages= */ inputPresentationTimesUs.length);
checkNotNull(glEffectsFrameProcessor)
checkNotNull(defaultVideoFrameProcessor)
.setOutputSurfaceInfo(
new SurfaceInfo(outputImageReader.getSurface(), width, height));
outputImageReader.setOnImageAvailableListener(
@ -325,34 +326,35 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
}
@Override
public void onFrameProcessingError(FrameProcessingException exception) {
frameProcessingExceptionReference.set(exception);
frameProcessingEndedCountDownLatch.countDown();
public void onError(VideoFrameProcessingException exception) {
videoFrameProcessingExceptionReference.set(exception);
videoFrameProcessingEndedCountDownLatch.countDown();
}
@Override
public void onFrameProcessingEnded() {
frameProcessingEndedCountDownLatch.countDown();
public void onEnded() {
videoFrameProcessingEndedCountDownLatch.countDown();
}
}));
glEffectsFrameProcessor
defaultVideoFrameProcessor
.getTaskExecutor()
.submit(
() -> {
blankFrameProducer.configureGlObjects();
checkNotNull(glEffectsFrameProcessor)
checkNotNull(defaultVideoFrameProcessor)
.setInputFrameInfo(new FrameInfo.Builder(WIDTH, HEIGHT).build());
// A frame needs to be registered despite not queuing any external input to ensure
// that
// the frame processor knows about the stream offset.
glEffectsFrameProcessor.registerInputFrame();
// the video frame processor knows about the stream offset.
defaultVideoFrameProcessor.registerInputFrame();
blankFrameProducer.produceBlankFramesAndQueueEndOfStream(inputPresentationTimesUs);
});
frameProcessingEndedCountDownLatch.await();
@Nullable Exception frameProcessingException = frameProcessingExceptionReference.get();
if (frameProcessingException != null) {
throw frameProcessingException;
videoFrameProcessingEndedCountDownLatch.await();
@Nullable
Exception videoFrameProcessingException = videoFrameProcessingExceptionReference.get();
if (videoFrameProcessingException != null) {
throw videoFrameProcessingException;
}
}
@ -374,7 +376,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
private @MonotonicNonNull TextureInfo blankTexture;
private @MonotonicNonNull OutputListener outputListener;
public void configureGlObjects() throws FrameProcessingException {
public void configureGlObjects() throws VideoFrameProcessingException {
try {
int texId =
GlUtil.createTexture(WIDTH, HEIGHT, /* useHighPrecisionColorComponents= */ false);
@ -383,7 +385,7 @@ public final class GlEffectsFrameProcessorFrameReleaseTest {
GlUtil.focusFramebufferUsingCurrentContext(fboId, WIDTH, HEIGHT);
GlUtil.clearOutputFrame();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
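
For reference, the renamed listener callbacks this test exercises, gathered in one sketch. The onOutputSizeChanged, onError, and onEnded signatures appear verbatim in the diff above; the onOutputFrameAvailable override is an assumption, since it falls in an elided part of this hunk:

VideoFrameProcessor.Listener listener =
    new VideoFrameProcessor.Listener() {
      @Override
      public void onOutputSizeChanged(int width, int height) {
        // Create the output ImageReader and call setOutputSurfaceInfo, as above.
      }

      @Override
      public void onOutputFrameAvailable(long presentationTimeUs) {
        // Assumed signature: record the timestamp, then call releaseOutputFrame(...).
      }

      @Override
      public void onError(VideoFrameProcessingException exception) {
        // Store the exception and count down the end-of-processing latch.
      }

      @Override
      public void onEnded() {
        // Count down the end-of-processing latch.
      }
    };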

View File

@ -34,9 +34,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
@ -50,7 +50,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class HslAdjustmentPixelTest {
@ -100,7 +100,7 @@ public final class HslAdjustmentPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (hslProcessor != null) {
hslProcessor.release();
}

View File

@ -32,8 +32,8 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
@ -47,7 +47,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class MatrixShaderProgramPixelTest {
@ -87,7 +87,7 @@ public final class MatrixShaderProgramPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) {
matrixShaderProgram.release();
}

View File

@ -37,9 +37,9 @@ import android.text.SpannableString;
import android.text.style.ForegroundColorSpan;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -54,7 +54,7 @@ import org.junit.runner.RunWith;
* <p>Expected bitmaps are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public class OverlayShaderProgramPixelTest {
@ -101,7 +101,7 @@ public class OverlayShaderProgramPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (overlayShaderProgram != null) {
overlayShaderProgram.release();
}

View File

@ -33,9 +33,9 @@ import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class PresentationPixelTest {
@ -91,7 +91,7 @@ public final class PresentationPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (presentationShaderProgram != null) {
presentationShaderProgram.release();
}

View File

@ -35,9 +35,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -52,7 +52,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class RgbAdjustmentPixelTest {
@ -99,7 +99,7 @@ public final class RgbAdjustmentPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) {
matrixShaderProgram.release();
}

View File

@ -33,9 +33,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public final class RgbFilterPixelTest {
@ -94,7 +94,7 @@ public final class RgbFilterPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (matrixShaderProgram != null) {
matrixShaderProgram.release();
}

View File

@ -34,9 +34,9 @@ import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.BitmapPixelTestUtil;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Before;
@ -49,7 +49,7 @@ import org.junit.runner.RunWith;
* <p>Expected images are taken from an emulator, so tests on different emulators or physical
* devices may fail. To test on other devices, please increase the {@link
* BitmapPixelTestUtil#MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} and/or inspect the saved output
* bitmaps as recommended in {@link GlEffectsFrameProcessorPixelTest}.
* bitmaps as recommended in {@link DefaultVideoFrameProcessorPixelTest}.
*/
@RunWith(AndroidJUnit4.class)
public class SingleColorLutPixelTest {
@ -88,7 +88,7 @@ public class SingleColorLutPixelTest {
}
@After
public void release() throws GlUtil.GlException, FrameProcessingException {
public void release() throws GlUtil.GlException, VideoFrameProcessingException {
if (colorLutShaderProgram != null) {
colorLutShaderProgram.release();
}

View File

@ -22,9 +22,9 @@ import android.net.Uri;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import com.google.android.exoplayer2.util.BitmapLoader;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.concurrent.ExecutionException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -42,9 +42,9 @@ public abstract class BitmapOverlay extends TextureOverlay {
* Returns the overlay bitmap displayed at the specified timestamp.
*
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame.
* @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/
public abstract Bitmap getBitmap(long presentationTimeUs) throws FrameProcessingException;
public abstract Bitmap getBitmap(long presentationTimeUs) throws VideoFrameProcessingException;
/**
* {@inheritDoc}
@ -59,7 +59,7 @@ public abstract class BitmapOverlay extends TextureOverlay {
}
@Override
public int getTextureId(long presentationTimeUs) throws FrameProcessingException {
public int getTextureId(long presentationTimeUs) throws VideoFrameProcessingException {
Bitmap bitmap = getBitmap(presentationTimeUs);
if (bitmap != lastBitmap) {
try {
@ -77,7 +77,7 @@ public abstract class BitmapOverlay extends TextureOverlay {
/* border= */ 0);
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
return lastTextureId;
@ -132,14 +132,14 @@ public abstract class BitmapOverlay extends TextureOverlay {
private @MonotonicNonNull Bitmap lastBitmap;
@Override
public Bitmap getBitmap(long presentationTimeUs) throws FrameProcessingException {
public Bitmap getBitmap(long presentationTimeUs) throws VideoFrameProcessingException {
if (lastBitmap == null) {
BitmapLoader bitmapLoader = new SimpleBitmapLoader();
ListenableFuture<Bitmap> future = bitmapLoader.loadBitmap(overlayBitmapUri);
try {
lastBitmap = future.get();
} catch (ExecutionException | InterruptedException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
return lastBitmap;
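
Subclasses therefore only supply a bitmap per timestamp; texture upload and caching stay in getTextureId above, and the static factories in this class already cover fixed images. A sketch of a dynamic overlay, where drawTimecode is a hypothetical helper that renders the elapsed seconds into a Bitmap:

BitmapOverlay timecodeOverlay =
    new BitmapOverlay() {
      private Bitmap bitmap;
      private long lastSecond = -1;

      @Override
      public Bitmap getBitmap(long presentationTimeUs) throws VideoFrameProcessingException {
        long second = presentationTimeUs / 1_000_000;
        if (second != lastSecond) {
          // Returning a different Bitmap instance makes getTextureId re-upload the texture.
          bitmap = drawTimecode(second); // Hypothetical helper.
          lastSecond = second;
        }
        return bitmap;
      }
    };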

View File

@ -35,7 +35,7 @@ import java.util.Queue;
private final GlShaderProgram producingGlShaderProgram;
private final GlShaderProgram consumingGlShaderProgram;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
@GuardedBy("this")
private final Queue<Pair<TextureInfo, Long>> availableFrames;
@ -50,18 +50,18 @@ import java.util.Queue;
* as {@link OutputListener}.
* @param consumingGlShaderProgram The {@link GlShaderProgram} for which this listener will be set
* as {@link InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that is used for
* OpenGL calls. All calls to the producing/consuming {@link GlShaderProgram} will be executed
* by the {@link FrameProcessingTaskExecutor}. The caller is responsible for releasing the
* {@link FrameProcessingTaskExecutor}.
* @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor} that is
* used for OpenGL calls. All calls to the producing/consuming {@link GlShaderProgram} will be
* executed by the {@link VideoFrameProcessingTaskExecutor}. The caller is responsible for
* releasing the {@link VideoFrameProcessingTaskExecutor}.
*/
public ChainingGlShaderProgramListener(
GlShaderProgram producingGlShaderProgram,
GlShaderProgram consumingGlShaderProgram,
FrameProcessingTaskExecutor frameProcessingTaskExecutor) {
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.producingGlShaderProgram = producingGlShaderProgram;
this.consumingGlShaderProgram = consumingGlShaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
availableFrames = new ArrayDeque<>();
}
@ -75,9 +75,10 @@ import java.util.Queue;
long presentationTimeUs = pendingFrame.second;
if (presentationTimeUs == C.TIME_END_OF_SOURCE) {
frameProcessingTaskExecutor.submit(consumingGlShaderProgram::signalEndOfCurrentInputStream);
videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
} else {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() ->
consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ pendingFrame.first, presentationTimeUs));
@ -86,7 +87,7 @@ import java.util.Queue;
@Override
public void onInputFrameProcessed(TextureInfo inputTexture) {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> producingGlShaderProgram.releaseOutputFrame(inputTexture));
}
@ -94,14 +95,14 @@ import java.util.Queue;
public synchronized void onFlush() {
consumingGlShaderProgramInputCapacity = 0;
availableFrames.clear();
frameProcessingTaskExecutor.submit(producingGlShaderProgram::flush);
videoFrameProcessingTaskExecutor.submit(producingGlShaderProgram::flush);
}
@Override
public synchronized void onOutputFrameAvailable(
TextureInfo outputTexture, long presentationTimeUs) {
if (consumingGlShaderProgramInputCapacity > 0) {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() ->
consumingGlShaderProgram.queueInputFrame(
/* inputTexture= */ outputTexture, presentationTimeUs));
@ -116,7 +117,8 @@ import java.util.Queue;
if (!availableFrames.isEmpty()) {
availableFrames.add(new Pair<>(TextureInfo.UNSET, C.TIME_END_OF_SOURCE));
} else {
frameProcessingTaskExecutor.submit(consumingGlShaderProgram::signalEndOfCurrentInputStream);
videoFrameProcessingTaskExecutor.submit(
consumingGlShaderProgram::signalEndOfCurrentInputStream);
}
}
}
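
In practice the wiring is symmetric, as in this short sketch mirroring chainShaderProgramsWithListeners in DefaultVideoFrameProcessor, shown later in this change; the program and executor names are assumed to be in scope:

ChainingGlShaderProgramListener chainingListener =
    new ChainingGlShaderProgramListener(
        producingGlShaderProgram, consumingGlShaderProgram, videoFrameProcessingTaskExecutor);
// The producer reports finished frames; the consumer receives capacity and input signals.
producingGlShaderProgram.setOutputListener(chainingListener);
consumingGlShaderProgram.setInputListener(chainingListener);
// DefaultVideoFrameProcessor additionally forwards producer errors to Listener#onError.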

View File

@ -18,8 +18,8 @@ package com.google.android.exoplayer2.effect;
import android.content.Context;
import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/**
* Specifies color transformations using color lookup tables to apply to each frame in the fragment
@ -43,7 +43,7 @@ public interface ColorLut extends GlEffect {
@Override
@WorkerThread
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr);
}
}
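
With this default, an implementation only has to provide the lookup table. A hedged usage sketch — the SingleColorLut factory is assumed from this package (SingleColorLutPixelTest above exercises the class), and lutBitmap is a LUT image decoded elsewhere:

// Assumption: SingleColorLut#createFromBitmap exists in this package; lutBitmap is hypothetical.
ColorLut sepia = SingleColorLut.createFromBitmap(lutBitmap);
// The default toGlShaderProgram above then builds a ColorLutShaderProgram from it.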

View File

@ -20,10 +20,10 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context;
import android.opengl.GLES20;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException;
/** Applies a {@link ColorLut} to each frame in the fragment shader. */
@ -41,10 +41,10 @@ import java.io.IOException;
* @param colorLut The {@link ColorLut} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public ColorLutShaderProgram(Context context, ColorLut colorLut, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
// TODO(b/246315245): Add HDR support.
checkArgument(!useHdr, "ColorLutShaderProgram does not support HDR colors.");
@ -53,7 +53,7 @@ import java.io.IOException;
try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
@ -73,7 +73,8 @@ import java.io.IOException;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
@ -84,18 +85,18 @@ import java.io.IOException;
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
colorLut.release();
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
}

View File

@ -19,7 +19,7 @@ package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** A {@link GlEffect} to control the contrast of video frames. */
public class Contrast implements GlEffect {
@ -40,7 +40,7 @@ public class Contrast implements GlEffect {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new ContrastShaderProgram(context, this, useHdr);
}
}
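
Like other GlEffect implementations, Contrast is applied by passing it in the effects list; a sketch, where the [-1, 1] input range is inferred from the contrast-factor math in ContrastShaderProgram below:

// contrast = 0f leaves frames unchanged; values approaching 1f maximize contrast.
Effect increaseContrast = new Contrast(/* contrast= */ 0.5f);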

View File

@ -18,10 +18,10 @@ package com.google.android.exoplayer2.effect;
import android.content.Context;
import android.opengl.GLES20;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException;
/** Applies a {@link Contrast} to each frame in the fragment shader. */
@ -38,10 +38,10 @@ import java.io.IOException;
* @param contrastEffect The {@link Contrast} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public ContrastShaderProgram(Context context, Contrast contrastEffect, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
// Use 1.0001f to avoid division by zero issues.
float contrastFactor = (1 + contrastEffect.contrast) / (1.0001f - contrastEffect.contrast);
@ -49,7 +49,7 @@ import java.io.IOException;
try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
@ -70,7 +70,8 @@ import java.io.IOException;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
@ -79,17 +80,17 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
}

View File

@ -36,11 +36,11 @@ import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
@ -53,13 +53,13 @@ import java.util.concurrent.Future;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* A {@link FrameProcessor} implementation that applies {@link GlEffect} instances using OpenGL on a
* background thread.
* A {@link VideoFrameProcessor} implementation that applies {@link GlEffect} instances using OpenGL
* on a background thread.
*/
public final class GlEffectsFrameProcessor implements FrameProcessor {
public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
/** A factory for {@link GlEffectsFrameProcessor} instances. */
public static class Factory implements FrameProcessor.Factory {
/** A factory for {@link DefaultVideoFrameProcessor} instances. */
public static class Factory implements VideoFrameProcessor.Factory {
/**
* {@inheritDoc}
*
@ -86,11 +86,11 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
* be configured with {@link GlUtil#EGL_CONFIG_ATTRIBUTES_RGBA_1010102}. Otherwise, the context
* will be configured with {@link GlUtil#EGL_CONFIG_ATTRIBUTES_RGBA_8888}.
*
* <p>If invoking the {@code listener} on {@link GlEffectsFrameProcessor}'s internal thread is
* desired, pass a {@link MoreExecutors#directExecutor() direct listenerExecutor}.
* <p>If invoking the {@code listener} on {@link DefaultVideoFrameProcessor}'s internal thread
* is desired, pass a {@link MoreExecutors#directExecutor() direct listenerExecutor}.
*/
@Override
public GlEffectsFrameProcessor create(
public DefaultVideoFrameProcessor create(
Context context,
List<Effect> effects,
DebugViewProvider debugViewProvider,
@ -100,7 +100,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
boolean releaseFramesAutomatically,
Executor listenerExecutor,
Listener listener)
throws FrameProcessingException {
throws VideoFrameProcessingException {
// TODO(b/261188041) Add tests to verify the Listener is invoked on the given Executor.
checkArgument(inputColorInfo.isValid());
@ -124,7 +124,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
ExecutorService singleThreadExecutorService = Util.newSingleThreadExecutor(THREAD_NAME);
Future<GlEffectsFrameProcessor> glFrameProcessorFuture =
Future<DefaultVideoFrameProcessor> glFrameProcessorFuture =
singleThreadExecutorService.submit(
() ->
createOpenGlObjectsAndFrameProcessor(
@ -142,10 +142,10 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
try {
return glFrameProcessorFuture.get();
} catch (ExecutionException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
}
@ -153,7 +153,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
/**
* Creates the OpenGL context, surfaces, textures, and frame buffers, initializes {@link
* GlShaderProgram} instances corresponding to the {@link GlEffect} instances, and returns a new
* {@code GlEffectsFrameProcessor}.
* {@code DefaultVideoFrameProcessor}.
*
* <p>All {@link Effect} instances must be {@link GlEffect} instances.
*
@ -161,7 +161,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
* commands will be called on that thread.
*/
@WorkerThread
private static GlEffectsFrameProcessor createOpenGlObjectsAndFrameProcessor(
private static DefaultVideoFrameProcessor createOpenGlObjectsAndFrameProcessor(
Context context,
List<Effect> effects,
DebugViewProvider debugViewProvider,
@ -172,7 +172,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
ExecutorService singleThreadExecutorService,
Executor executor,
Listener listener)
throws GlUtil.GlException, FrameProcessingException {
throws GlUtil.GlException, VideoFrameProcessingException {
checkState(Thread.currentThread().getName().equals(THREAD_NAME));
// TODO(b/237674316): Delay initialization of things requiring the colorInfo, to
@ -196,7 +196,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
GlUtil.destroyEglContext(eglDisplay, eglContext);
// On API<33, the system cannot display PQ content correctly regardless of whether BT2020 PQ
// GL extension is supported.
throw new FrameProcessingException("BT.2020 PQ OpenGL output isn't supported.");
throw new VideoFrameProcessingException("BT.2020 PQ OpenGL output isn't supported.");
}
}
@ -213,16 +213,16 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
releaseFramesAutomatically,
executor,
listener);
FrameProcessingTaskExecutor frameProcessingTaskExecutor =
new FrameProcessingTaskExecutor(singleThreadExecutorService, listener);
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
new VideoFrameProcessingTaskExecutor(singleThreadExecutorService, listener);
chainShaderProgramsWithListeners(
shaderPrograms, frameProcessingTaskExecutor, listener, executor);
shaderPrograms, videoFrameProcessingTaskExecutor, listener, executor);
return new GlEffectsFrameProcessor(
return new DefaultVideoFrameProcessor(
eglDisplay,
eglContext,
isInputTextureExternal,
frameProcessingTaskExecutor,
videoFrameProcessingTaskExecutor,
shaderPrograms,
releaseFramesAutomatically);
}
@ -250,7 +250,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
boolean releaseFramesAutomatically,
Executor executor,
Listener listener)
throws FrameProcessingException {
throws VideoFrameProcessingException {
ImmutableList.Builder<GlShaderProgram> shaderProgramListBuilder = new ImmutableList.Builder<>();
ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder =
new ImmutableList.Builder<>();
@ -264,7 +264,8 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
.build();
for (int i = 0; i < effects.size(); i++) {
Effect effect = effects.get(i);
checkArgument(effect instanceof GlEffect, "GlEffectsFrameProcessor only supports GlEffects");
checkArgument(
effect instanceof GlEffect, "DefaultVideoFrameProcessor only supports GlEffects");
GlEffect glEffect = (GlEffect) effect;
// The following logic may change the order of the RgbMatrix and GlMatrixTransformation
// effects. This does not influence the output since RgbMatrix only changes the individual
@ -331,18 +332,18 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
*/
private static void chainShaderProgramsWithListeners(
ImmutableList<GlShaderProgram> shaderPrograms,
FrameProcessingTaskExecutor frameProcessingTaskExecutor,
Listener frameProcessorListener,
Executor frameProcessorListenerExecutor) {
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
Listener videoFrameProcessorListener,
Executor videoFrameProcessorListenerExecutor) {
for (int i = 0; i < shaderPrograms.size() - 1; i++) {
GlShaderProgram producingGlShaderProgram = shaderPrograms.get(i);
GlShaderProgram consumingGlShaderProgram = shaderPrograms.get(i + 1);
ChainingGlShaderProgramListener chainingGlShaderProgramListener =
new ChainingGlShaderProgramListener(
producingGlShaderProgram, consumingGlShaderProgram, frameProcessingTaskExecutor);
producingGlShaderProgram, consumingGlShaderProgram, videoFrameProcessingTaskExecutor);
producingGlShaderProgram.setOutputListener(chainingGlShaderProgramListener);
producingGlShaderProgram.setErrorListener(
frameProcessorListenerExecutor, frameProcessorListener::onFrameProcessingError);
videoFrameProcessorListenerExecutor, videoFrameProcessorListener::onError);
consumingGlShaderProgram.setInputListener(chainingGlShaderProgramListener);
}
}
@ -352,7 +353,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
private final EGLDisplay eglDisplay;
private final EGLContext eglContext;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
private @MonotonicNonNull InternalTextureManager inputInternalTextureManager;
private @MonotonicNonNull ExternalTextureManager inputExternalTextureManager;
private final boolean releaseFramesAutomatically;
@ -368,18 +369,18 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded;
private GlEffectsFrameProcessor(
private DefaultVideoFrameProcessor(
EGLDisplay eglDisplay,
EGLContext eglContext,
boolean isInputTextureExternal,
FrameProcessingTaskExecutor frameProcessingTaskExecutor,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
ImmutableList<GlShaderProgram> shaderPrograms,
boolean releaseFramesAutomatically)
throws FrameProcessingException {
throws VideoFrameProcessingException {
this.eglDisplay = eglDisplay;
this.eglContext = eglContext;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
this.releaseFramesAutomatically = releaseFramesAutomatically;
checkState(!shaderPrograms.isEmpty());
@ -391,11 +392,11 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
checkState(inputShaderProgram instanceof ExternalShaderProgram);
inputExternalTextureManager =
new ExternalTextureManager(
(ExternalShaderProgram) inputShaderProgram, frameProcessingTaskExecutor);
(ExternalShaderProgram) inputShaderProgram, videoFrameProcessingTaskExecutor);
inputShaderProgram.setInputListener(inputExternalTextureManager);
} else {
inputInternalTextureManager =
new InternalTextureManager(inputShaderProgram, frameProcessingTaskExecutor);
new InternalTextureManager(inputShaderProgram, videoFrameProcessingTaskExecutor);
inputShaderProgram.setInputListener(inputInternalTextureManager);
}
@ -404,10 +405,10 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
previousStreamOffsetUs = C.TIME_UNSET;
}
/** Returns the task executor that runs frame processing tasks. */
/** Returns the task executor that runs video frame processing tasks. */
@VisibleForTesting
/* package */ FrameProcessingTaskExecutor getTaskExecutor() {
return frameProcessingTaskExecutor;
/* package */ VideoFrameProcessingTaskExecutor getTaskExecutor() {
return videoFrameProcessingTaskExecutor;
}
/**
@ -419,7 +420,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
* call this method after instantiation to ensure that buffers are handled at full resolution. See
* {@link SurfaceTexture#setDefaultBufferSize(int, int)} for more information.
*
* <p>This method should only be used when the {@link FrameProcessor}'s {@code
* <p>This method should only be used when the {@link VideoFrameProcessor}'s {@code
* isInputTextureExternal} parameter is set to {@code true}.
*
* @param width The default width for input buffers, in pixels.
@ -474,7 +475,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
checkState(
!releaseFramesAutomatically,
"Calling this method is not allowed when releaseFramesAutomatically is enabled");
frameProcessingTaskExecutor.submitWithHighPriority(
videoFrameProcessingTaskExecutor.submitWithHighPriority(
() -> finalShaderProgramWrapper.releaseOutputFrame(releaseTimeNs));
}
@ -483,20 +484,20 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
checkState(!inputStreamEnded);
inputStreamEnded = true;
if (inputInternalTextureManager != null) {
frameProcessingTaskExecutor.submit(inputInternalTextureManager::signalEndOfInput);
videoFrameProcessingTaskExecutor.submit(inputInternalTextureManager::signalEndOfInput);
}
if (inputExternalTextureManager != null) {
frameProcessingTaskExecutor.submit(inputExternalTextureManager::signalEndOfInput);
videoFrameProcessingTaskExecutor.submit(inputExternalTextureManager::signalEndOfInput);
}
}
@Override
public void flush() {
try {
frameProcessingTaskExecutor.flush();
videoFrameProcessingTaskExecutor.flush();
CountDownLatch latch = new CountDownLatch(1);
checkNotNull(inputExternalTextureManager).setOnFlushCompleteListener(latch::countDown);
frameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush);
videoFrameProcessingTaskExecutor.submit(finalShaderProgramWrapper::flush);
latch.await();
inputExternalTextureManager.setOnFlushCompleteListener(null);
} catch (InterruptedException e) {
@ -507,7 +508,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
@Override
public void release() {
try {
frameProcessingTaskExecutor.release(
videoFrameProcessingTaskExecutor.release(
/* releaseTask= */ this::releaseShaderProgramsAndDestroyGlContext, RELEASE_WAIT_TIME_MS);
} catch (InterruptedException unexpected) {
Thread.currentThread().interrupt();
@ -546,7 +547,7 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
*/
@WorkerThread
private void releaseShaderProgramsAndDestroyGlContext()
throws GlUtil.GlException, FrameProcessingException {
throws GlUtil.GlException, VideoFrameProcessingException {
for (int i = 0; i < allShaderPrograms.size(); i++) {
allShaderPrograms.get(i).release();
}

View File

@ -24,9 +24,9 @@ import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.effect.GlShaderProgram.InputListener;
import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
@ -37,7 +37,7 @@ import java.util.concurrent.atomic.AtomicInteger;
*/
/* package */ final class ExternalTextureManager implements InputListener {
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
private final ExternalShaderProgram externalShaderProgram;
private final int externalTexId;
private final Surface surface;
@ -61,7 +61,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Nullable private volatile FrameInfo currentFrame;
// TODO(b/238302341) Remove the use of the after-flush task; block the calling thread instead.
@Nullable private volatile FrameProcessingTask onFlushCompleteTask;
@Nullable private volatile VideoFrameProcessingTask onFlushCompleteTask;
private long previousStreamOffsetUs;
@ -70,21 +70,21 @@ import java.util.concurrent.atomic.AtomicInteger;
*
* @param externalShaderProgram The {@link ExternalShaderProgram} for which this {@code
* ExternalTextureManager} will be set as the {@link InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor}.
* @throws FrameProcessingException If a problem occurs while creating the external texture.
* @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor}.
* @throws VideoFrameProcessingException If a problem occurs while creating the external texture.
*/
// The onFrameAvailableListener will not be invoked until the constructor returns.
@SuppressWarnings("nullness:method.invocation.invalid")
public ExternalTextureManager(
ExternalShaderProgram externalShaderProgram,
FrameProcessingTaskExecutor frameProcessingTaskExecutor)
throws FrameProcessingException {
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor)
throws VideoFrameProcessingException {
this.externalShaderProgram = externalShaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
try {
externalTexId = GlUtil.createExternalTexture();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
surfaceTexture = new SurfaceTexture(externalTexId);
textureTransformMatrix = new float[16];
@ -93,7 +93,7 @@ import java.util.concurrent.atomic.AtomicInteger;
previousStreamOffsetUs = C.TIME_UNSET;
surfaceTexture.setOnFrameAvailableListener(
unused ->
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
if (numberOfFramesToDropOnBecomingAvailable > 0) {
numberOfFramesToDropOnBecomingAvailable--;
@ -119,7 +119,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override
public void onReadyToAcceptInputFrame() {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
externalShaderProgramInputCapacity.incrementAndGet();
maybeQueueFrameToExternalShaderProgram();
@ -128,7 +128,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override
public void onInputFrameProcessed(TextureInfo inputTexture) {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
currentFrame = null;
maybeQueueFrameToExternalShaderProgram();
@ -136,13 +136,13 @@ import java.util.concurrent.atomic.AtomicInteger;
}
/** Sets the task to run on completing flushing, or {@code null} to clear any task. */
public void setOnFlushCompleteListener(@Nullable FrameProcessingTask task) {
public void setOnFlushCompleteListener(@Nullable VideoFrameProcessingTask task) {
onFlushCompleteTask = task;
}
@Override
public void onFlush() {
frameProcessingTaskExecutor.submit(this::flush);
videoFrameProcessingTaskExecutor.submit(this::flush);
}
/**
@ -169,10 +169,10 @@ import java.util.concurrent.atomic.AtomicInteger;
/**
* Signals the end of the input.
*
* @see FrameProcessor#signalEndOfInput()
* @see VideoFrameProcessor#signalEndOfInput()
*/
public void signalEndOfInput() {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
inputStreamEnded = true;
if (pendingFrames.isEmpty() && currentFrame == null) {
@ -204,7 +204,7 @@ import java.util.concurrent.atomic.AtomicInteger;
if (onFlushCompleteTask == null || numberOfFramesToDropOnBecomingAvailable > 0) {
return;
}
frameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
videoFrameProcessingTaskExecutor.submitWithHighPriority(onFlushCompleteTask);
}
@WorkerThread

View File

@ -35,13 +35,13 @@ import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList;
import java.util.Queue;
@ -59,7 +59,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* the frames to the dimensions specified by the provided {@link SurfaceInfo}.
*
* <p>This wrapper is used for the final {@link GlShaderProgram} instance in the chain of {@link
* GlShaderProgram} instances used by {@link FrameProcessor}.
* GlShaderProgram} instances used by {@link VideoFrameProcessor}.
*/
/* package */ final class FinalMatrixShaderProgramWrapper implements ExternalShaderProgram {
@ -76,8 +76,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final ColorInfo inputColorInfo;
private final ColorInfo outputColorInfo;
private final boolean releaseFramesAutomatically;
private final Executor frameProcessorListenerExecutor;
private final FrameProcessor.Listener frameProcessorListener;
private final Executor videoFrameProcessorListenerExecutor;
private final VideoFrameProcessor.Listener videoFrameProcessorListener;
private final float[] textureTransformMatrix;
private final Queue<Long> streamOffsetUsQueue;
private final Queue<Pair<TextureInfo, Long>> availableFrames;
@ -112,8 +112,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
boolean sampleFromInputTexture,
boolean isInputTextureExternal,
boolean releaseFramesAutomatically,
Executor frameProcessorListenerExecutor,
FrameProcessor.Listener frameProcessorListener) {
Executor videoFrameProcessorListenerExecutor,
VideoFrameProcessor.Listener videoFrameProcessorListener) {
this.context = context;
this.matrixTransformations = matrixTransformations;
this.rgbMatrices = rgbMatrices;
@ -125,8 +125,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
this.inputColorInfo = inputColorInfo;
this.outputColorInfo = outputColorInfo;
this.releaseFramesAutomatically = releaseFramesAutomatically;
this.frameProcessorListenerExecutor = frameProcessorListenerExecutor;
this.frameProcessorListener = frameProcessorListener;
this.videoFrameProcessorListenerExecutor = videoFrameProcessorListenerExecutor;
this.videoFrameProcessorListener = videoFrameProcessorListener;
textureTransformMatrix = GlUtil.create4x4IdentityMatrix();
streamOffsetUsQueue = new ConcurrentLinkedQueue<>();
@ -142,13 +142,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void setOutputListener(OutputListener outputListener) {
// The FrameProcessor.Listener passed to the constructor is used for output-related events.
// The VideoFrameProcessor.Listener passed to the constructor is used for output-related events.
throw new UnsupportedOperationException();
}
@Override
public void setErrorListener(Executor executor, ErrorListener errorListener) {
// The FrameProcessor.Listener passed to the constructor is used for errors.
// The VideoFrameProcessor.Listener passed to the constructor is used for errors.
throw new UnsupportedOperationException();
}
@ -157,8 +157,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
long streamOffsetUs =
checkStateNotNull(streamOffsetUsQueue.peek(), "No input stream specified.");
long offsetPresentationTimeUs = presentationTimeUs + streamOffsetUs;
frameProcessorListenerExecutor.execute(
() -> frameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs));
videoFrameProcessorListenerExecutor.execute(
() -> videoFrameProcessorListener.onOutputFrameAvailable(offsetPresentationTimeUs));
if (releaseFramesAutomatically) {
renderFrameToSurfaces(
inputTexture, presentationTimeUs, /* releaseTimeNs= */ offsetPresentationTimeUs * 1000);
@ -189,7 +189,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
checkState(!streamOffsetUsQueue.isEmpty(), "No input stream to end.");
streamOffsetUsQueue.remove();
if (streamOffsetUsQueue.isEmpty()) {
frameProcessorListenerExecutor.execute(frameProcessorListener::onFrameProcessingEnded);
videoFrameProcessorListenerExecutor.execute(videoFrameProcessorListener::onEnded);
}
}
@ -206,14 +206,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
@WorkerThread
public synchronized void release() throws FrameProcessingException {
public synchronized void release() throws VideoFrameProcessingException {
if (matrixShaderProgram != null) {
matrixShaderProgram.release();
}
try {
GlUtil.destroyEglSurface(eglDisplay, outputEglSurface);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
@ -247,7 +247,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* Sets the output {@link SurfaceInfo}.
*
* @see FrameProcessor#setOutputSurfaceInfo(SurfaceInfo)
* @see VideoFrameProcessor#setOutputSurfaceInfo(SurfaceInfo)
*/
public synchronized void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
if (!Util.areEqual(this.outputSurfaceInfo, outputSurfaceInfo)) {
@ -257,9 +257,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
try {
GlUtil.destroyEglSurface(eglDisplay, outputEglSurface);
} catch (GlUtil.GlException e) {
frameProcessorListenerExecutor.execute(
() ->
frameProcessorListener.onFrameProcessingError(FrameProcessingException.from(e)));
videoFrameProcessorListenerExecutor.execute(
() -> videoFrameProcessorListener.onError(VideoFrameProcessingException.from(e)));
}
this.outputEglSurface = null;
}
@ -277,11 +276,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
TextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs) {
try {
maybeRenderFrameToOutputSurface(inputTexture, presentationTimeUs, releaseTimeNs);
} catch (FrameProcessingException | GlUtil.GlException e) {
frameProcessorListenerExecutor.execute(
} catch (VideoFrameProcessingException | GlUtil.GlException e) {
videoFrameProcessorListenerExecutor.execute(
() ->
frameProcessorListener.onFrameProcessingError(
FrameProcessingException.from(e, presentationTimeUs)));
videoFrameProcessorListener.onError(
VideoFrameProcessingException.from(e, presentationTimeUs)));
}
maybeRenderFrameToDebugSurface(inputTexture, presentationTimeUs);
inputListener.onInputFrameProcessed(inputTexture);
@ -289,8 +288,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private synchronized void maybeRenderFrameToOutputSurface(
TextureInfo inputTexture, long presentationTimeUs, long releaseTimeNs)
throws FrameProcessingException, GlUtil.GlException {
if (releaseTimeNs == FrameProcessor.DROP_OUTPUT_FRAME
throws VideoFrameProcessingException, GlUtil.GlException {
if (releaseTimeNs == VideoFrameProcessor.DROP_OUTPUT_FRAME
|| !ensureConfigured(inputTexture.width, inputTexture.height)) {
return; // Drop frames when requested, or when there is no output surface.
}
@ -311,7 +310,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
EGLExt.eglPresentationTimeANDROID(
eglDisplay,
outputEglSurface,
releaseTimeNs == FrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY
releaseTimeNs == VideoFrameProcessor.RELEASE_OUTPUT_FRAME_IMMEDIATELY
? System.nanoTime()
: releaseTimeNs);
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
@ -321,7 +320,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
expression = {"outputSurfaceInfo", "outputEglSurface", "matrixShaderProgram"},
result = true)
private synchronized boolean ensureConfigured(int inputWidth, int inputHeight)
throws FrameProcessingException, GlUtil.GlException {
throws VideoFrameProcessingException, GlUtil.GlException {
if (this.inputWidth != inputWidth
|| this.inputHeight != inputHeight
@ -333,9 +332,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (!Util.areEqual(
this.outputSizeBeforeSurfaceTransformation, outputSizeBeforeSurfaceTransformation)) {
this.outputSizeBeforeSurfaceTransformation = outputSizeBeforeSurfaceTransformation;
frameProcessorListenerExecutor.execute(
videoFrameProcessorListenerExecutor.execute(
() ->
frameProcessorListener.onOutputSizeChanged(
videoFrameProcessorListener.onOutputSizeChanged(
outputSizeBeforeSurfaceTransformation.getWidth(),
outputSizeBeforeSurfaceTransformation.getHeight()));
}
@ -389,7 +388,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
private MatrixShaderProgram createMatrixShaderProgramForOutputSurface(
SurfaceInfo outputSurfaceInfo) throws FrameProcessingException {
SurfaceInfo outputSurfaceInfo) throws VideoFrameProcessingException {
ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder =
new ImmutableList.Builder<GlMatrixTransformation>().addAll(matrixTransformations);
if (outputSurfaceInfo.orientationDegrees != 0) {
@ -453,7 +452,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
matrixShaderProgram.drawFrame(inputTexture.texId, presentationTimeUs);
matrixShaderProgram.setOutputColorTransfer(configuredColorTransfer);
});
} catch (FrameProcessingException | GlUtil.GlException e) {
} catch (VideoFrameProcessingException | GlUtil.GlException e) {
Log.d(TAG, "Error rendering to debug preview", e);
}
}
@ -502,8 +501,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* otherwise.
*/
@WorkerThread
public synchronized void maybeRenderToSurfaceView(FrameProcessingTask renderingTask)
throws GlUtil.GlException, FrameProcessingException {
public synchronized void maybeRenderToSurfaceView(VideoFrameProcessingTask renderingTask)
throws GlUtil.GlException, VideoFrameProcessingException {
if (surface == null) {
return;
}

View File

@ -19,13 +19,13 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context;
import androidx.annotation.IntRange;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/**
* Caches the input frames.
*
* <p>Example usage: cache the processed frames when presenting them on screen, to accommodate
* the possible fluctuation in frame processing time between frames.
* the possible fluctuation in video frame processing time between frames.
*/
public final class FrameCache implements GlEffect {
/** The capacity of the frame cache. */
@ -49,7 +49,7 @@ public final class FrameCache implements GlEffect {
@Override
public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new FrameCacheShaderProgram(context, capacity, useHdr);
}
}
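
FrameCache stays a plain GlEffect, so call sites only pick up the new exception name. A minimal sketch of putting the cache into an effects list (the single-int constructor and the capacity value are assumed for illustration):

List<Effect> effects = new ArrayList<>();
effects.add(new FrameCache(/* capacity= */ 5)); // Smooths per-frame processing-time jitter.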

View File

@ -19,9 +19,9 @@ import static com.google.android.exoplayer2.util.Assertions.checkState;
import android.content.Context;
import android.opengl.GLES20;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
@ -54,7 +54,7 @@ import java.util.concurrent.Executor;
/** Creates a new instance. */
public FrameCacheShaderProgram(Context context, int capacity, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
freeOutputTextures = new ArrayDeque<>();
inUseOutputTextures = new ArrayDeque<>();
try {
@ -64,7 +64,7 @@ import java.util.concurrent.Executor;
VERTEX_SHADER_TRANSFORMATION_ES2_PATH,
FRAGMENT_SHADER_TRANSFORMATION_ES2_PATH);
} catch (IOException | GlUtil.GlException e) {
throw FrameProcessingException.from(e);
throw VideoFrameProcessingException.from(e);
}
this.capacity = capacity;
this.useHdr = useHdr;
@ -80,7 +80,7 @@ import java.util.concurrent.Executor;
inputListener = new InputListener() {};
outputListener = new OutputListener() {};
errorListener = frameProcessingException -> {};
errorListener = videoFrameProcessingException -> {};
errorListenerExecutor = MoreExecutors.directExecutor();
}
@ -129,7 +129,7 @@ import java.util.concurrent.Executor;
outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
} catch (GlUtil.GlException | NoSuchElementException e) {
errorListenerExecutor.execute(
() -> errorListener.onFrameProcessingError(FrameProcessingException.from(e)));
() -> errorListener.onError(VideoFrameProcessingException.from(e)));
}
}
@ -167,11 +167,11 @@ import java.util.concurrent.Executor;
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
try {
deleteAllOutputTextures();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}

View File

@ -17,7 +17,7 @@ package com.google.android.exoplayer2.effect;
import android.content.Context;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/**
* Interface for a video frame effect with a {@link GlShaderProgram} implementation.
@ -34,10 +34,11 @@ public interface GlEffect extends Effect {
* @param context A {@link Context}.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If an error occurs while creating the {@link GlShaderProgram}.
* @throws VideoFrameProcessingException If an error occurs while creating the {@link
* GlShaderProgram}.
*/
GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException;
throws VideoFrameProcessingException;
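
For effect implementors, only the thrown type changes. A minimal hypothetical implementation that delegates to a matrix program with no transformations (the four-argument MatrixShaderProgram.create overload appears later in this commit):

/** Hypothetical no-op effect illustrating the renamed checked exception. */
public final class IdentityEffect implements GlEffect {
  @Override
  public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
      throws VideoFrameProcessingException {
    return MatrixShaderProgram.create(
        context,
        /* matrixTransformations= */ ImmutableList.of(),
        /* rgbMatrices= */ ImmutableList.of(),
        useHdr);
  }
}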
/**
* Returns whether a {@link GlEffect} applies no change at every timestamp.

View File

@ -17,8 +17,8 @@ package com.google.android.exoplayer2.effect;
import android.content.Context;
import android.opengl.Matrix;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList;
/**
@ -52,7 +52,7 @@ public interface GlMatrixTransformation extends GlEffect {
@Override
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return MatrixShaderProgram.create(
context,
/* matrixTransformations= */ ImmutableList.of(this),

View File

@ -15,7 +15,7 @@
*/
package com.google.android.exoplayer2.effect;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.util.concurrent.Executor;
/**
@ -45,7 +45,7 @@ import java.util.concurrent.Executor;
public interface GlShaderProgram {
/**
* Listener for input-related frame processing events.
* Listener for input-related video frame processing events.
*
* <p>This listener can be called from any thread.
*/
@ -79,7 +79,7 @@ public interface GlShaderProgram {
}
/**
* Listener for output-related frame processing events.
* Listener for output-related video frame processing events.
*
* <p>This listener can be called from any thread.
*/
@ -106,26 +106,26 @@ public interface GlShaderProgram {
}
/**
* Listener for frame processing errors.
* Listener for video frame processing errors.
*
* <p>This listener can be called from any thread.
*/
interface ErrorListener {
/**
* Called when an exception occurs during asynchronous frame processing.
* Called when an exception occurs during asynchronous video frame processing.
*
* <p>If an error occurred, consuming and producing further frames will not work as expected and
* the {@link GlShaderProgram} should be released.
*/
void onFrameProcessingError(FrameProcessingException e);
void onError(VideoFrameProcessingException e);
}
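
ErrorListener remains a single-method interface, so implementations stay lambda-friendly; only the method and exception names change. A hedged sketch (shaderProgram and listenerExecutor assumed in scope):

shaderProgram.setErrorListener(
    listenerExecutor,
    (VideoFrameProcessingException e) -> Log.e("Effects", "Async processing failed", e));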
/**
* Sets the {@link InputListener}.
*
* <p>The {@link InputListener} should be invoked on the thread that owns the parent OpenGL
* context. For example, {@link GlEffectsFrameProcessor} invokes the {@link InputListener} methods
* on its internal thread.
* context. For example, {@link DefaultVideoFrameProcessor} invokes the {@link InputListener}
* methods on its internal thread.
*/
void setInputListener(InputListener inputListener);
@ -133,7 +133,7 @@ public interface GlShaderProgram {
* Sets the {@link OutputListener}.
*
* <p>The {@link OutputListener} should be invoked on the thread that owns the parent OpenGL
* context. For example, {@link GlEffectsFrameProcessor} invokes the {@link OutputListener}
* context. For example, {@link DefaultVideoFrameProcessor} invokes the {@link OutputListener}
* methods on its internal thread.
*/
void setOutputListener(OutputListener outputListener);
@ -188,7 +188,7 @@ public interface GlShaderProgram {
/**
* Releases all resources.
*
* @throws FrameProcessingException If an error occurs while releasing resources.
* @throws VideoFrameProcessingException If an error occurs while releasing resources.
*/
void release() throws FrameProcessingException;
void release() throws VideoFrameProcessingException;
}

View File

@ -19,7 +19,7 @@ package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
/** Adjusts the HSL (Hue, Saturation, and Lightness) of a frame. */
@ -112,7 +112,7 @@ public class HslAdjustment implements GlEffect {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new HslShaderProgram(context, /* hslAdjustment= */ this, useHdr);
}
}

View File

@ -20,10 +20,10 @@ import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import android.content.Context;
import android.opengl.GLES20;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import java.io.IOException;
/** Applies the {@link HslAdjustment} to each frame in the fragment shader. */
@ -40,10 +40,10 @@ import java.io.IOException;
* @param hslAdjustment The {@link HslAdjustment} to apply to each frame in order.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public HslShaderProgram(Context context, HslAdjustment hslAdjustment, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
// TODO(b/241241680): Check if HDR <-> HSL works the same or not.
checkArgument(!useHdr, "HDR is not yet supported.");
@ -51,7 +51,7 @@ import java.io.IOException;
try {
glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
// Draw the frame on the entire normalized device coordinate space, from -1 to 1, for x and y.
@ -78,7 +78,8 @@ import java.io.IOException;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexId, /* texUnitIndex= */ 0);
@ -87,7 +88,7 @@ import java.io.IOException;
// The four-vertex triangle strip forms a quad.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
}

View File

@ -23,20 +23,21 @@ import android.opengl.GLES20;
import android.opengl.GLUtils;
import androidx.annotation.WorkerThread;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;
/**
* Forwards a frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for consumption.
* Forwards a video frame produced from a {@link Bitmap} to a {@link GlShaderProgram} for
* consumption.
*
* <p>Methods in this class can be called from any thread.
*/
/* package */ final class InternalTextureManager implements GlShaderProgram.InputListener {
private final GlShaderProgram shaderProgram;
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor;
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
// The queue holds all bitmaps with one or more frames pending to be sent downstream.
private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
@ -51,13 +52,14 @@ import java.util.concurrent.LinkedBlockingQueue;
*
* @param shaderProgram The {@link GlShaderProgram} for which this {@code InternalTextureManager}
* will be set as the {@link GlShaderProgram.InputListener}.
* @param frameProcessingTaskExecutor The {@link FrameProcessingTaskExecutor} that the methods of
* this class run on.
* @param videoFrameProcessingTaskExecutor The {@link VideoFrameProcessingTaskExecutor} that the
* methods of this class run on.
*/
public InternalTextureManager(
GlShaderProgram shaderProgram, FrameProcessingTaskExecutor frameProcessingTaskExecutor) {
GlShaderProgram shaderProgram,
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor) {
this.shaderProgram = shaderProgram;
this.frameProcessingTaskExecutor = frameProcessingTaskExecutor;
this.videoFrameProcessingTaskExecutor = videoFrameProcessingTaskExecutor;
pendingBitmaps = new LinkedBlockingQueue<>();
}
@ -67,7 +69,7 @@ import java.util.concurrent.LinkedBlockingQueue;
// program and change to only allocate one texId at a time. A change to the
// onInputFrameProcessed() method signature to include presentationTimeUs will probably be
// needed to do this.
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
downstreamShaderProgramCapacity++;
maybeQueueToShaderProgram();
@ -77,21 +79,21 @@ import java.util.concurrent.LinkedBlockingQueue;
/**
* Provides an input {@link Bitmap} to put into the video frames.
*
* @see FrameProcessor#queueInputBitmap
* @see VideoFrameProcessor#queueInputBitmap
*/
public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, float frameRate, boolean useHdr) {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> setupBitmap(inputBitmap, durationUs, frameRate, useHdr));
}
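
A hypothetical call matching the signature above, feeding one SDR bitmap as two seconds of 30 fps video before ending the stream:

internalTextureManager.queueInputBitmap(
    bitmap, /* durationUs= */ 2_000_000, /* frameRate= */ 30, /* useHdr= */ false);
internalTextureManager.signalEndOfInput(); // No more input after this bitmap.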
/**
* Signals the end of the input.
*
* @see FrameProcessor#signalEndOfInput()
* @see VideoFrameProcessor#signalEndOfInput()
*/
public void signalEndOfInput() {
frameProcessingTaskExecutor.submit(
videoFrameProcessingTaskExecutor.submit(
() -> {
inputEnded = true;
maybeSignalEndOfOutput();
@ -100,7 +102,7 @@ import java.util.concurrent.LinkedBlockingQueue;
@WorkerThread
private void setupBitmap(Bitmap bitmap, long durationUs, float frameRate, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
if (inputEnded) {
return;
@ -114,7 +116,7 @@ import java.util.concurrent.LinkedBlockingQueue;
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw FrameProcessingException.from(e);
throw VideoFrameProcessingException.from(e);
}
TextureInfo textureInfo =
new TextureInfo(

View File

@ -23,10 +23,10 @@ import android.opengl.GLES20;
import android.opengl.Matrix;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
@ -141,15 +141,15 @@ import java.util.List;
* @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be
* empty to apply no color transformations.
* @param useHdr Whether input and output colors are HDR.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL
* operation fails or is unsupported.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* OpenGL operation fails or is unsupported.
*/
public static MatrixShaderProgram create(
Context context,
List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices,
boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
GlProgram glProgram =
createGlProgram(
context, VERTEX_SHADER_TRANSFORMATION_PATH, FRAGMENT_SHADER_TRANSFORMATION_PATH);
@ -183,8 +183,8 @@ import java.util.List;
* @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}.
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL
* operation fails or is unsupported.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* OpenGL operation fails or is unsupported.
*/
public static MatrixShaderProgram createWithInternalSampler(
Context context,
@ -192,7 +192,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo)
throws FrameProcessingException {
throws VideoFrameProcessingException {
checkState(
!ColorInfo.isTransferHdr(inputColorInfo),
"MatrixShaderProgram doesn't support HDR internal sampler input yet.");
@ -227,8 +227,8 @@ import java.util.List;
* @param outputColorInfo The output electrical (nonlinear) or optical (linear) {@link ColorInfo}.
* If this is an optical color, it must be BT.2020 if {@code inputColorInfo} is {@linkplain
* ColorInfo#isTransferHdr(ColorInfo) HDR}, and RGB BT.709 if not.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL
* operation fails or is unsupported.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* OpenGL operation fails or is unsupported.
*/
public static MatrixShaderProgram createWithExternalSampler(
Context context,
@ -236,7 +236,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo)
throws FrameProcessingException {
throws VideoFrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
String vertexShaderFilePath =
isInputTransferHdr
@ -270,15 +270,15 @@ import java.util.List;
* @param rgbMatrices The {@link RgbMatrix RgbMatrices} to apply to each frame in order. Can be
* empty to apply no color transformations.
* @param outputColorInfo The electrical (non-linear) {@link ColorInfo} describing output colors.
* @throws FrameProcessingException If a problem occurs while reading shader files or an OpenGL
* operation fails or is unsupported.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files or an
* OpenGL operation fails or is unsupported.
*/
public static MatrixShaderProgram createApplyingOetf(
Context context,
List<GlMatrixTransformation> matrixTransformations,
List<RgbMatrix> rgbMatrices,
ColorInfo outputColorInfo)
throws FrameProcessingException {
throws VideoFrameProcessingException {
boolean outputIsHdr = ColorInfo.isTransferHdr(outputColorInfo);
String vertexShaderFilePath =
outputIsHdr ? VERTEX_SHADER_TRANSFORMATION_ES3_PATH : VERTEX_SHADER_TRANSFORMATION_PATH;
@ -315,7 +315,7 @@ import java.util.List;
List<RgbMatrix> rgbMatrices,
ColorInfo inputColorInfo,
ColorInfo outputColorInfo)
throws FrameProcessingException {
throws VideoFrameProcessingException {
boolean isInputTransferHdr = ColorInfo.isTransferHdr(inputColorInfo);
@C.ColorTransfer int outputColorTransfer = outputColorInfo.colorTransfer;
if (isInputTransferHdr) {
@ -323,7 +323,7 @@ import java.util.List;
// In HDR editing mode the decoder output is sampled in YUV.
if (!GlUtil.isYuvTargetExtensionSupported()) {
throw new FrameProcessingException(
throw new VideoFrameProcessingException(
"The EXT_YUV_target extension is required for HDR editing input.");
}
glProgram.setFloatsUniform(
@ -396,13 +396,13 @@ import java.util.List;
private static GlProgram createGlProgram(
Context context, String vertexShaderFilePath, String fragmentShaderFilePath)
throws FrameProcessingException {
throws VideoFrameProcessingException {
GlProgram glProgram;
try {
glProgram = new GlProgram(context, vertexShaderFilePath, fragmentShaderFilePath);
} catch (IOException | GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
float[] identityMatrix = GlUtil.create4x4IdentityMatrix();
@ -421,7 +421,8 @@ import java.util.List;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
updateCompositeRgbaMatrixArray(presentationTimeUs);
updateCompositeTransformationMatrixAndVisiblePolygon(presentationTimeUs);
if (visiblePolygon.size() < 3) {
@ -442,17 +443,17 @@ import java.util.List;
GLES20.GL_TRIANGLE_FAN, /* first= */ 0, /* count= */ visiblePolygon.size());
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}

View File

@ -16,7 +16,7 @@
package com.google.android.exoplayer2.effect;
import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList;
/**
@ -38,7 +38,7 @@ public final class OverlayEffect implements GlEffect {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return new OverlayShaderProgram(context, useHdr, overlays);
}
}

View File

@ -21,11 +21,11 @@ import android.content.Context;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Pair;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlProgram;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList;
/** Applies zero or more {@link TextureOverlay}s onto each frame. */
@ -49,11 +49,11 @@ import com.google.common.collect.ImmutableList;
* @param context The {@link Context}.
* @param useHdr Whether input textures come from an HDR source. If {@code true}, colors will be
* in linear RGB BT.2020. If {@code false}, colors will be in linear RGB BT.709.
* @throws FrameProcessingException If a problem occurs while reading shader files.
* @throws VideoFrameProcessingException If a problem occurs while reading shader files.
*/
public OverlayShaderProgram(
Context context, boolean useHdr, ImmutableList<TextureOverlay> overlays)
throws FrameProcessingException {
throws VideoFrameProcessingException {
super(useHdr);
checkArgument(!useHdr, "OverlayShaderProgram does not support HDR colors yet.");
// The maximum number of samplers allowed in a single GL program is 16.
@ -70,7 +70,7 @@ import com.google.common.collect.ImmutableList;
glProgram =
new GlProgram(createVertexShader(overlays.size()), createFragmentShader(overlays.size()));
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
glProgram.setBufferAttribute(
@ -91,7 +91,8 @@ import com.google.common.collect.ImmutableList;
}
@Override
public void drawFrame(int inputTexId, long presentationTimeUs) throws FrameProcessingException {
public void drawFrame(int inputTexId, long presentationTimeUs)
throws VideoFrameProcessingException {
try {
glProgram.use();
if (!overlays.isEmpty()) {
@ -155,17 +156,17 @@ import com.google.common.collect.ImmutableList;
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e, presentationTimeUs);
throw new VideoFrameProcessingException(e, presentationTimeUs);
}
}
@Override
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
super.release();
try {
glProgram.delete();
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}

View File

@ -19,7 +19,7 @@ package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Provides common color filters. */
@ -90,7 +90,7 @@ public class RgbFilter implements RgbMatrix {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
checkForConsistentHdrSetting(useHdr);
return RgbMatrix.super.toGlShaderProgram(context, useHdr);
}

View File

@ -17,7 +17,7 @@
package com.google.android.exoplayer2.effect;
import android.content.Context;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.collect.ImmutableList;
/**
@ -39,7 +39,7 @@ public interface RgbMatrix extends GlEffect {
@Override
default SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
return MatrixShaderProgram.create(
context,
/* matrixTransformations= */ ImmutableList.of(),

View File

@ -24,9 +24,9 @@ import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** Transforms the colors of a frame by applying the same color lookup table to each frame. */
public class SingleColorLut implements ColorLut {
@ -148,13 +148,13 @@ public class SingleColorLut implements ColorLut {
@Override
public SingleFrameGlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
throws FrameProcessingException {
throws VideoFrameProcessingException {
checkState(!useHdr, "HDR is currently not supported.");
try {
lutTextureId = storeLutAsTexture(lut);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException("Could not store the LUT as a texture.", e);
throw new VideoFrameProcessingException("Could not store the LUT as a texture.", e);
}
return new ColorLutShaderProgram(context, /* colorLut= */ this, useHdr);

View File

@ -18,9 +18,9 @@ package com.google.android.exoplayer2.effect;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import androidx.annotation.CallSuper;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.common.util.concurrent.MoreExecutors;
import java.util.concurrent.Executor;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
@ -59,7 +59,7 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
this.useHdr = useHdr;
inputListener = new InputListener() {};
outputListener = new OutputListener() {};
errorListener = (frameProcessingException) -> {};
errorListener = (videoFrameProcessingException) -> {};
errorListenerExecutor = MoreExecutors.directExecutor();
}
@ -72,9 +72,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
* @param inputWidth The input width, in pixels.
* @param inputHeight The input height, in pixels.
* @return The output width and height of frames processed through {@link #drawFrame(int, long)}.
* @throws FrameProcessingException If an error occurs while configuring.
* @throws VideoFrameProcessingException If an error occurs while configuring.
*/
public abstract Size configure(int inputWidth, int inputHeight) throws FrameProcessingException;
public abstract Size configure(int inputWidth, int inputHeight)
throws VideoFrameProcessingException;
/**
* Draws one frame.
@ -88,10 +89,10 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
*
* @param inputTexId Identifier of a 2D OpenGL texture containing the input frame.
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame.
* @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/
public abstract void drawFrame(int inputTexId, long presentationTimeUs)
throws FrameProcessingException;
throws VideoFrameProcessingException;
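
A skeletal subclass showing where the renamed throws clauses land (a sketch only; the drawing logic is omitted):

/** Hypothetical pass-through program. */
final class PassthroughShaderProgram extends SingleFrameGlShaderProgram {
  PassthroughShaderProgram() {
    super(/* useHdr= */ false);
  }

  @Override
  public Size configure(int inputWidth, int inputHeight) throws VideoFrameProcessingException {
    return new Size(inputWidth, inputHeight); // Output size matches input.
  }

  @Override
  public void drawFrame(int inputTexId, long presentationTimeUs)
      throws VideoFrameProcessingException {
    // A real implementation would bind a GlProgram and draw here.
  }
}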
@Override
public final void setInputListener(InputListener inputListener) {
@ -132,19 +133,19 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
drawFrame(inputTexture.texId, presentationTimeUs);
inputListener.onInputFrameProcessed(inputTexture);
outputListener.onOutputFrameAvailable(outputTexture, presentationTimeUs);
} catch (FrameProcessingException | GlUtil.GlException | RuntimeException e) {
} catch (VideoFrameProcessingException | GlUtil.GlException | RuntimeException e) {
errorListenerExecutor.execute(
() ->
errorListener.onFrameProcessingError(
e instanceof FrameProcessingException
? (FrameProcessingException) e
: new FrameProcessingException(e)));
errorListener.onError(
e instanceof VideoFrameProcessingException
? (VideoFrameProcessingException) e
: new VideoFrameProcessingException(e)));
}
}
@EnsuresNonNull("outputTexture")
private void configureOutputTexture(int inputWidth, int inputHeight)
throws GlUtil.GlException, FrameProcessingException {
throws GlUtil.GlException, VideoFrameProcessingException {
this.inputWidth = inputWidth;
this.inputHeight = inputHeight;
Size outputSize = configure(inputWidth, inputHeight);
@ -182,12 +183,12 @@ public abstract class SingleFrameGlShaderProgram implements GlShaderProgram {
@Override
@CallSuper
public void release() throws FrameProcessingException {
public void release() throws VideoFrameProcessingException {
if (outputTexture != null) {
try {
GlUtil.deleteTexture(outputTexture.texId);
} catch (GlUtil.GlException e) {
throw new FrameProcessingException(e);
throw new VideoFrameProcessingException(e);
}
}
}

View File

@ -15,8 +15,8 @@
*/
package com.google.android.exoplayer2.effect;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.Size;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/** Creates overlays from OpenGL textures. */
public abstract class TextureOverlay {
@ -24,9 +24,9 @@ public abstract class TextureOverlay {
* Returns the overlay texture identifier displayed at the specified timestamp.
*
* @param presentationTimeUs The presentation timestamp of the current frame, in microseconds.
* @throws FrameProcessingException If an error occurs while processing or drawing the frame.
* @throws VideoFrameProcessingException If an error occurs while processing or drawing the frame.
*/
public abstract int getTextureId(long presentationTimeUs) throws FrameProcessingException;
public abstract int getTextureId(long presentationTimeUs) throws VideoFrameProcessingException;
// This method is required to find the size of a texture given a texture identifier using OpenGL
// ES 2.0. OpenGL ES 3.1 can do this with glGetTexLevelParameteriv().

View File

@ -15,14 +15,14 @@
*/
package com.google.android.exoplayer2.effect;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
/**
* Interface for tasks that may throw a {@link GlUtil.GlException} or {@link
* FrameProcessingException}.
* VideoFrameProcessingException}.
*/
/* package */ interface FrameProcessingTask {
/* package */ interface VideoFrameProcessingTask {
/** Runs the task. */
void run() throws FrameProcessingException, GlUtil.GlException;
void run() throws VideoFrameProcessingException, GlUtil.GlException;
}
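
Since run() is the interface's only method, tasks remain expressible as lambdas, for example:

// Either declared exception may propagate; GlUtil.checkGlError() throws GlUtil.GlException.
VideoFrameProcessingTask checkTask = () -> GlUtil.checkGlError();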

View File

@ -19,8 +19,8 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
import androidx.annotation.GuardedBy;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import java.util.ArrayDeque;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
@ -29,36 +29,36 @@ import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
/**
* Wrapper around a single thread {@link ExecutorService} for executing {@link FrameProcessingTask}
* instances.
* Wrapper around a single thread {@link ExecutorService} for executing {@link
* VideoFrameProcessingTask} instances.
*
* <p>Public methods can be called from any thread.
*
* <p>The wrapper handles calling {@link
* FrameProcessor.Listener#onFrameProcessingError(FrameProcessingException)} for errors that occur
* during these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed
* to be non-recoverable, so the {@code FrameProcessingTaskExecutor} should be released if an error
* VideoFrameProcessor.Listener#onError(VideoFrameProcessingException)} for errors that occur during
* these tasks. The listener is invoked from the {@link ExecutorService}. Errors are assumed to be
* non-recoverable, so the {@code VideoFrameProcessingTaskExecutor} should be released if an error
* occurs.
*
* <p>{@linkplain #submitWithHighPriority(FrameProcessingTask) High priority tasks} are always
* executed before {@linkplain #submit(FrameProcessingTask) default priority tasks}. Tasks with
* <p>{@linkplain #submitWithHighPriority(VideoFrameProcessingTask) High priority tasks} are always
* executed before {@linkplain #submit(VideoFrameProcessingTask) default priority tasks}. Tasks with
* equal priority are executed in FIFO order.
*/
/* package */ final class FrameProcessingTaskExecutor {
/* package */ final class VideoFrameProcessingTaskExecutor {
private final ExecutorService singleThreadExecutorService;
private final FrameProcessor.Listener listener;
private final VideoFrameProcessor.Listener listener;
private final Object lock;
@GuardedBy("lock")
private final ArrayDeque<FrameProcessingTask> highPriorityTasks;
private final ArrayDeque<VideoFrameProcessingTask> highPriorityTasks;
@GuardedBy("lock")
private boolean shouldCancelTasks;
/** Creates a new instance. */
public FrameProcessingTaskExecutor(
ExecutorService singleThreadExecutorService, FrameProcessor.Listener listener) {
public VideoFrameProcessingTaskExecutor(
ExecutorService singleThreadExecutorService, VideoFrameProcessor.Listener listener) {
this.singleThreadExecutorService = singleThreadExecutorService;
this.listener = listener;
lock = new Object();
@ -66,11 +66,11 @@ import java.util.concurrent.RejectedExecutionException;
}
/**
* Submits the given {@link FrameProcessingTask} to be executed after all pending tasks have
* Submits the given {@link VideoFrameProcessingTask} to be executed after all pending tasks have
* completed.
*/
@SuppressWarnings("FutureReturnValueIgnored")
public void submit(FrameProcessingTask task) {
public void submit(VideoFrameProcessingTask task) {
@Nullable RejectedExecutionException executionException = null;
synchronized (lock) {
if (shouldCancelTasks) {
@ -89,13 +89,13 @@ import java.util.concurrent.RejectedExecutionException;
}
/**
* Submits the given {@link FrameProcessingTask} to be executed after the currently running task
* and all previously submitted high-priority tasks have completed.
* Submits the given {@link VideoFrameProcessingTask} to be executed after the currently running
* task and all previously submitted high-priority tasks have completed.
*
* <p>Tasks that were previously {@linkplain #submit(FrameProcessingTask) submitted} without
* <p>Tasks that were previously {@linkplain #submit(VideoFrameProcessingTask) submitted} without
* high-priority and have not started executing will be executed after this task is complete.
*/
public void submitWithHighPriority(FrameProcessingTask task) {
public void submitWithHighPriority(VideoFrameProcessingTask task) {
synchronized (lock) {
if (shouldCancelTasks) {
return;
@ -111,7 +111,7 @@ import java.util.concurrent.RejectedExecutionException;
/**
* Flushes all scheduled tasks.
*
* <p>During flush, the {@code FrameProcessingTaskExecutor} ignores the {@linkplain #submit
* <p>During flush, the {@code VideoFrameProcessingTaskExecutor} ignores the {@linkplain #submit
* submission of new tasks}. The tasks that are submitted before flushing are either executed or
* canceled when this method returns.
*/
@ -137,12 +137,12 @@ import java.util.concurrent.RejectedExecutionException;
/**
* Cancels remaining tasks, runs the given release task, and shuts down the background thread.
*
* @param releaseTask A {@link FrameProcessingTask} to execute before shutting down the background
* thread.
* @param releaseTask A {@link VideoFrameProcessingTask} to execute before shutting down the
* background thread.
* @param releaseWaitTimeMs How long to wait for the release task to terminate, in milliseconds.
* @throws InterruptedException If interrupted while releasing resources.
*/
public void release(FrameProcessingTask releaseTask, long releaseWaitTimeMs)
public void release(VideoFrameProcessingTask releaseTask, long releaseWaitTimeMs)
throws InterruptedException {
synchronized (lock) {
shouldCancelTasks = true;
@ -153,16 +153,16 @@ import java.util.concurrent.RejectedExecutionException;
singleThreadExecutorService.shutdown();
try {
if (!singleThreadExecutorService.awaitTermination(releaseWaitTimeMs, MILLISECONDS)) {
listener.onFrameProcessingError(new FrameProcessingException("Release timed out"));
listener.onError(new VideoFrameProcessingException("Release timed out"));
}
releaseFuture.get();
} catch (ExecutionException e) {
listener.onFrameProcessingError(new FrameProcessingException(e));
listener.onError(new VideoFrameProcessingException(e));
}
}
private Future<?> wrapTaskAndSubmitToExecutorService(
FrameProcessingTask defaultPriorityTask, boolean isFlushOrReleaseTask) {
VideoFrameProcessingTask defaultPriorityTask, boolean isFlushOrReleaseTask) {
return singleThreadExecutorService.submit(
() -> {
try {
@ -172,7 +172,7 @@ import java.util.concurrent.RejectedExecutionException;
}
}
@Nullable FrameProcessingTask nextHighPriorityTask;
@Nullable VideoFrameProcessingTask nextHighPriorityTask;
while (true) {
synchronized (lock) {
// Lock only polling to prevent blocking the public method calls.
@ -199,6 +199,6 @@ import java.util.concurrent.RejectedExecutionException;
}
shouldCancelTasks = true;
}
listener.onFrameProcessingError(FrameProcessingException.from(exception));
listener.onError(VideoFrameProcessingException.from(exception));
}
}
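A usage sketch of the renamed executor; the listener variable, thread name, and task bodies are placeholders, and release's InterruptedException is left to the caller:

ExecutorService glExecutorService = Util.newSingleThreadExecutor("Effect:GlThread");
VideoFrameProcessingTaskExecutor taskExecutor =
    new VideoFrameProcessingTaskExecutor(glExecutorService, listener);

// Default-priority tasks run in FIFO order...
taskExecutor.submit(() -> GlUtil.checkGlError());
// ...while high-priority tasks jump ahead of pending default-priority ones.
taskExecutor.submitWithHighPriority(() -> GlUtil.checkGlError());

// Remaining tasks are canceled on release; task errors are reported through
// listener.onError(VideoFrameProcessingException).
taskExecutor.release(/* releaseTask= */ () -> {}, /* releaseWaitTimeMs= */ 100);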


@ -19,8 +19,8 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -30,20 +30,22 @@ import org.junit.runner.RunWith;
public final class ChainingGlShaderProgramListenerTest {
private static final long EXECUTOR_WAIT_TIME_MS = 100;
private final FrameProcessor.Listener mockFrameProcessorListener =
mock(FrameProcessor.Listener.class);
private final FrameProcessingTaskExecutor frameProcessingTaskExecutor =
new FrameProcessingTaskExecutor(
private final VideoFrameProcessor.Listener mockFrameProcessorListener =
mock(VideoFrameProcessor.Listener.class);
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor =
new VideoFrameProcessingTaskExecutor(
Util.newSingleThreadExecutor("Test"), mockFrameProcessorListener);
private final GlShaderProgram mockProducingGlShaderProgram = mock(GlShaderProgram.class);
private final GlShaderProgram mockConsumingGlShaderProgram = mock(GlShaderProgram.class);
private final ChainingGlShaderProgramListener chainingGlShaderProgramListener =
new ChainingGlShaderProgramListener(
mockProducingGlShaderProgram, mockConsumingGlShaderProgram, frameProcessingTaskExecutor);
mockProducingGlShaderProgram,
mockConsumingGlShaderProgram,
videoFrameProcessingTaskExecutor);
@After
public void release() throws InterruptedException {
frameProcessingTaskExecutor.release(/* releaseTask= */ () -> {}, EXECUTOR_WAIT_TIME_MS);
videoFrameProcessingTaskExecutor.release(/* releaseTask= */ () -> {}, EXECUTOR_WAIT_TIME_MS);
}
@Test


@ -28,9 +28,9 @@ import android.graphics.Bitmap;
import android.util.Log;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.effect.GlEffectsFrameProcessor;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.testutil.DecodeOneFrameUtil;
import com.google.android.exoplayer2.testutil.FrameProcessorTestRunner;
import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.ColorInfo;
@ -40,10 +40,10 @@ import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Instrumentation pixel-test for HDR to SDR tone-mapping via {@link GlEffectsFrameProcessor}.
* Instrumentation pixel-test for HDR to SDR tone-mapping via {@link DefaultVideoFrameProcessor}.
*
* <p>Uses a {@link GlEffectsFrameProcessor} to process one frame, and checks that the actual output
* matches expected output, either from a golden file or from another edit.
* <p>Uses a {@link DefaultVideoFrameProcessor} to process one frame, and checks that the actual
* output matches expected output, either from a golden file or from another edit.
*/
// TODO(b/263395272): Move this test to effects/mh tests.
@RunWith(AndroidJUnit4.class)
@ -75,12 +75,12 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
"OpenGL-based HDR to SDR tone mapping is unsupported below API 29.";
private static final String SKIP_REASON_NO_YUV = "Device lacks YUV extension support.";
private @MonotonicNonNull FrameProcessorTestRunner frameProcessorTestRunner;
private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner;
@After
public void release() {
if (frameProcessorTestRunner != null) {
frameProcessorTestRunner.release();
if (videoFrameProcessorTestRunner != null) {
videoFrameProcessorTestRunner.release();
}
}
@ -114,7 +114,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build();
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setVideoAssetPath(INPUT_HLG_MP4_ASSET_STRING)
.setInputColorInfo(hlgColor)
@ -124,7 +124,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
Bitmap actualBitmap;
try {
actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
} catch (UnsupportedOperationException e) {
if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
@ -177,7 +177,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.setColorRange(C.COLOR_RANGE_LIMITED)
.setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
.build();
frameProcessorTestRunner =
videoFrameProcessorTestRunner =
getDefaultFrameProcessorTestRunnerBuilder(testId)
.setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING)
.setInputColorInfo(pqColor)
@ -187,7 +187,7 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
Bitmap actualBitmap;
try {
actualBitmap = frameProcessorTestRunner.processFirstFrameAndEnd();
actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd();
} catch (UnsupportedOperationException e) {
if (e.getMessage() != null
&& e.getMessage().equals(DecodeOneFrameUtil.NO_DECODER_SUPPORT_ERROR_STRING)) {
@ -209,10 +209,10 @@ public final class ToneMapHdrToSdrUsingOpenGlPixelTest {
.isAtMost(MAXIMUM_DEVICE_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}
private FrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) {
return new FrameProcessorTestRunner.Builder()
return new VideoFrameProcessorTestRunner.Builder()
.setTestId(testId)
.setFrameProcessorFactory(new GlEffectsFrameProcessor.Factory());
.setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory());
}
}
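The gamma 2.2 color these assertions rely on matches what the pipeline builds for OpenGL tone mapping; as a standalone sketch using only constants shown in this commit:

// SDR output for OpenGL tone mapping: BT.709 with a gamma 2.2 transfer, kept
// consistent with the Android platform's tone mapper.
ColorInfo toneMappedOutputColor =
    new ColorInfo.Builder()
        .setColorSpace(C.COLOR_SPACE_BT709)
        .setColorRange(C.COLOR_RANGE_LIMITED)
        .setColorTransfer(C.COLOR_TRANSFER_GAMMA_2_2)
        .build();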


@ -17,9 +17,9 @@ package com.google.android.exoplayer2.transformer;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.effect.GlEffectsFrameProcessor;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableList;
import java.util.List;
@ -43,19 +43,19 @@ public final class Effects {
*/
public final ImmutableList<Effect> videoEffects;
/**
* The {@link FrameProcessor.Factory} for the {@link FrameProcessor} to use when applying the
* {@code videoEffects} to the video frames.
* The {@link VideoFrameProcessor.Factory} for the {@link VideoFrameProcessor} to use when
* applying the {@code videoEffects} to the video frames.
*/
public final FrameProcessor.Factory frameProcessorFactory;
public final VideoFrameProcessor.Factory videoFrameProcessorFactory;
/**
* Creates an instance using a {@link GlEffectsFrameProcessor.Factory}.
* Creates an instance using a {@link DefaultVideoFrameProcessor.Factory}.
*
* <p>This is equivalent to calling {@link Effects#Effects(List, List, FrameProcessor.Factory)}
* with a {@link GlEffectsFrameProcessor.Factory}.
* <p>This is equivalent to calling {@link Effects#Effects(List, List,
* VideoFrameProcessor.Factory)} with a {@link DefaultVideoFrameProcessor.Factory}.
*/
public Effects(List<AudioProcessor> audioProcessors, List<Effect> videoEffects) {
this(audioProcessors, videoEffects, new GlEffectsFrameProcessor.Factory());
this(audioProcessors, videoEffects, new DefaultVideoFrameProcessor.Factory());
}
/**
@ -63,14 +63,14 @@ public final class Effects {
*
* @param audioProcessors The {@link #audioProcessors}.
* @param videoEffects The {@link #videoEffects}.
* @param frameProcessorFactory The {@link #frameProcessorFactory}.
* @param videoFrameProcessorFactory The {@link #videoFrameProcessorFactory}.
*/
public Effects(
List<AudioProcessor> audioProcessors,
List<Effect> videoEffects,
FrameProcessor.Factory frameProcessorFactory) {
VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.audioProcessors = ImmutableList.copyOf(audioProcessors);
this.videoEffects = ImmutableList.copyOf(videoEffects);
this.frameProcessorFactory = frameProcessorFactory;
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
}
}
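A sketch of the two equivalent ways to construct Effects after the rename; the empty lists are illustrative:

// Explicit factory:
Effects effects =
    new Effects(
        /* audioProcessors= */ ImmutableList.of(),
        /* videoEffects= */ ImmutableList.of(),
        new DefaultVideoFrameProcessor.Factory());

// Two-argument form, which supplies a DefaultVideoFrameProcessor.Factory itself:
Effects equivalentEffects =
    new Effects(/* audioProcessors= */ ImmutableList.of(), /* videoEffects= */ ImmutableList.of());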


@ -23,9 +23,9 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.AudioProcessor.AudioFormat;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableBiMap;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
@ -64,7 +64,7 @@ public final class TransformationException extends Exception {
ERROR_CODE_ENCODER_INIT_FAILED,
ERROR_CODE_ENCODING_FAILED,
ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED,
ERROR_CODE_FRAME_PROCESSING_FAILED,
ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED,
ERROR_CODE_AUDIO_PROCESSING_FAILED,
ERROR_CODE_MUXING_FAILED,
})
@ -149,8 +149,8 @@ public final class TransformationException extends Exception {
// Video editing errors (5xxx).
/** Caused by a frame processing failure. */
public static final int ERROR_CODE_FRAME_PROCESSING_FAILED = 5001;
/** Caused by a video frame processing failure. */
public static final int ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED = 5001;
// Audio processing errors (6xxx).
@ -180,7 +180,7 @@ public final class TransformationException extends Exception {
.put("ERROR_CODE_ENCODER_INIT_FAILED", ERROR_CODE_ENCODER_INIT_FAILED)
.put("ERROR_CODE_ENCODING_FAILED", ERROR_CODE_ENCODING_FAILED)
.put("ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED", ERROR_CODE_ENCODING_FORMAT_UNSUPPORTED)
.put("ERROR_CODE_FRAME_PROCESSING_FAILED", ERROR_CODE_FRAME_PROCESSING_FAILED)
.put("ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED", ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED)
.put("ERROR_CODE_AUDIO_PROCESSING_FAILED", ERROR_CODE_AUDIO_PROCESSING_FAILED)
.put("ERROR_CODE_MUXING_FAILED", ERROR_CODE_MUXING_FAILED)
.buildOrThrow();
@ -269,15 +269,15 @@ public final class TransformationException extends Exception {
}
/**
* Creates an instance for a {@link FrameProcessor} related exception.
* Creates an instance for a {@link VideoFrameProcessor} related exception.
*
* @param cause The cause of the failure.
* @param errorCode See {@link #errorCode}.
* @return The created instance.
*/
/* package */ static TransformationException createForFrameProcessingException(
FrameProcessingException cause, int errorCode) {
return new TransformationException("Frame processing error", cause, errorCode);
/* package */ static TransformationException createForVideoFrameProcessingException(
VideoFrameProcessingException cause, int errorCode) {
return new TransformationException("Video frame processing error", cause, errorCode);
}
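Since the renamed factory method is package-private, only transformer-internal code maps failures onto the renamed 5xxx constant; a sketch with an illustrative cause:

TransformationException transformationException =
    TransformationException.createForVideoFrameProcessingException(
        new VideoFrameProcessingException("GL context lost"), // Illustrative cause.
        TransformationException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED);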
/**


@ -30,16 +30,16 @@ import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.SonicAudioProcessor;
import com.google.android.exoplayer2.effect.GlEffectsFrameProcessor;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.source.DefaultMediaSourceFactory;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.HandlerWrapper;
import com.google.android.exoplayer2.util.ListenerSet;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.InlineMe;
@ -87,7 +87,7 @@ public final class Transformer {
private boolean generateSilentAudio;
private ListenerSet<Transformer.Listener> listeners;
private AssetLoader.@MonotonicNonNull Factory assetLoaderFactory;
private FrameProcessor.Factory frameProcessorFactory;
private VideoFrameProcessor.Factory videoFrameProcessorFactory;
private Codec.EncoderFactory encoderFactory;
private Muxer.Factory muxerFactory;
private Looper looper;
@ -104,7 +104,7 @@ public final class Transformer {
transformationRequest = new TransformationRequest.Builder().build();
audioProcessors = ImmutableList.of();
videoEffects = ImmutableList.of();
frameProcessorFactory = new GlEffectsFrameProcessor.Factory();
videoFrameProcessorFactory = new DefaultVideoFrameProcessor.Factory();
encoderFactory = new DefaultEncoderFactory.Builder(this.context).build();
muxerFactory = new DefaultMuxer.Factory();
looper = Util.getCurrentOrMainLooper();
@ -124,7 +124,7 @@ public final class Transformer {
this.generateSilentAudio = transformer.generateSilentAudio;
this.listeners = transformer.listeners;
this.assetLoaderFactory = transformer.assetLoaderFactory;
this.frameProcessorFactory = transformer.frameProcessorFactory;
this.videoFrameProcessorFactory = transformer.videoFrameProcessorFactory;
this.encoderFactory = transformer.encoderFactory;
this.muxerFactory = transformer.muxerFactory;
this.looper = transformer.looper;
@ -296,13 +296,14 @@ public final class Transformer {
}
/**
* @deprecated Set the {@link FrameProcessor.Factory} in an {@link EditedMediaItem}, and pass it
* to {@link #start(EditedMediaItem, String)} instead.
* @deprecated Set the {@link VideoFrameProcessor.Factory} in an {@link EditedMediaItem}, and
* pass it to {@link #start(EditedMediaItem, String)} instead.
*/
@CanIgnoreReturnValue
@Deprecated
public Builder setFrameProcessorFactory(FrameProcessor.Factory frameProcessorFactory) {
this.frameProcessorFactory = frameProcessorFactory;
public Builder setFrameProcessorFactory(
VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
return this;
}
@ -448,7 +449,7 @@ public final class Transformer {
generateSilentAudio,
listeners,
assetLoaderFactory,
frameProcessorFactory,
videoFrameProcessorFactory,
encoderFactory,
muxerFactory,
looper,
@ -606,7 +607,7 @@ public final class Transformer {
private final boolean generateSilentAudio;
private final ListenerSet<Transformer.Listener> listeners;
private final AssetLoader.Factory assetLoaderFactory;
private final FrameProcessor.Factory frameProcessorFactory;
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
private final Codec.EncoderFactory encoderFactory;
private final Muxer.Factory muxerFactory;
private final Looper looper;
@ -627,7 +628,7 @@ public final class Transformer {
boolean generateSilentAudio,
ListenerSet<Listener> listeners,
AssetLoader.Factory assetLoaderFactory,
FrameProcessor.Factory frameProcessorFactory,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory,
Muxer.Factory muxerFactory,
Looper looper,
@ -645,7 +646,7 @@ public final class Transformer {
this.generateSilentAudio = generateSilentAudio;
this.listeners = listeners;
this.assetLoaderFactory = assetLoaderFactory;
this.frameProcessorFactory = frameProcessorFactory;
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
this.encoderFactory = encoderFactory;
this.muxerFactory = muxerFactory;
this.looper = looper;
@ -842,7 +843,7 @@ public final class Transformer {
.setRemoveAudio(removeAudio)
.setRemoveVideo(removeVideo)
.setFlattenForSlowMotion(flattenForSlowMotion)
.setEffects(new Effects(audioProcessors, videoEffects, frameProcessorFactory))
.setEffects(new Effects(audioProcessors, videoEffects, videoFrameProcessorFactory))
.build();
start(editedMediaItem, path);
}
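The deprecated setter above points callers to the per-item API; a sketch of that replacement path, assuming mediaItem, transformer, and outputPath already exist:

EditedMediaItem editedMediaItem =
    new EditedMediaItem.Builder(mediaItem)
        .setEffects(
            new Effects(
                /* audioProcessors= */ ImmutableList.of(),
                /* videoEffects= */ ImmutableList.of(),
                new DefaultVideoFrameProcessor.Factory()))
        .build();
transformer.start(editedMediaItem, outputPath);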


@ -498,7 +498,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
streamOffsetUs,
transformationRequest,
firstEditedMediaItem.effects.videoEffects,
firstEditedMediaItem.effects.frameProcessorFactory,
firstEditedMediaItem.effects.videoFrameProcessorFactory,
encoderFactory,
muxerWrapper,
/* errorConsumer= */ this::onError,


@ -38,12 +38,12 @@ import com.google.android.exoplayer2.util.Consumer;
import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
@ -58,8 +58,8 @@ import org.checkerframework.dataflow.qual.Pure;
/** MIME type to use for output video if the input type is not a video. */
private static final String DEFAULT_OUTPUT_MIME_TYPE = MimeTypes.VIDEO_H265;
private final FrameProcessor frameProcessor;
private final ColorInfo frameProcessorInputColor;
private final VideoFrameProcessor videoFrameProcessor;
private final ColorInfo videoFrameProcessorInputColor;
private final FrameInfo firstFrameInfo;
private final EncoderWrapper encoderWrapper;
@ -67,7 +67,7 @@ import org.checkerframework.dataflow.qual.Pure;
/**
* The timestamp of the last buffer processed before {@linkplain
* FrameProcessor.Listener#onFrameProcessingEnded() frame processing has ended}.
* VideoFrameProcessor.Listener#onEnded() frame processing has ended}.
*/
private volatile long finalFramePresentationTimeUs;
@ -78,7 +78,7 @@ import org.checkerframework.dataflow.qual.Pure;
long streamOffsetUs,
TransformationRequest transformationRequest,
ImmutableList<Effect> effects,
FrameProcessor.Factory frameProcessorFactory,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
Codec.EncoderFactory encoderFactory,
MuxerWrapper muxerWrapper,
Consumer<TransformationException> errorConsumer,
@ -122,12 +122,12 @@ import org.checkerframework.dataflow.qual.Pure;
ColorInfo encoderInputColor = encoderWrapper.getSupportedInputColor();
// If not tone mapping using OpenGL, the decoder will output the encoderInputColor,
// possibly by tone mapping.
frameProcessorInputColor =
videoFrameProcessorInputColor =
isGlToneMapping ? checkNotNull(firstInputFormat.colorInfo) : encoderInputColor;
// For consistency with the Android platform, OpenGL tone mapping outputs colors with
// C.COLOR_TRANSFER_GAMMA_2_2 instead of C.COLOR_TRANSFER_SDR, and outputs this as
// C.COLOR_TRANSFER_SDR to the encoder.
ColorInfo frameProcessorOutputColor =
ColorInfo videoFrameProcessorOutputColor =
isGlToneMapping
? new ColorInfo.Builder()
.setColorSpace(C.COLOR_SPACE_BT709)
@ -136,23 +136,23 @@ import org.checkerframework.dataflow.qual.Pure;
.build()
: encoderInputColor;
try {
frameProcessor =
frameProcessorFactory.create(
videoFrameProcessor =
videoFrameProcessorFactory.create(
context,
effects,
debugViewProvider,
frameProcessorInputColor,
frameProcessorOutputColor,
videoFrameProcessorInputColor,
videoFrameProcessorOutputColor,
MimeTypes.isVideo(firstInputFormat.sampleMimeType),
/* releaseFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new FrameProcessor.Listener() {
new VideoFrameProcessor.Listener() {
private long lastProcessedFramePresentationTimeUs;
@Override
public void onOutputSizeChanged(int width, int height) {
try {
checkNotNull(frameProcessor)
checkNotNull(videoFrameProcessor)
.setOutputSurfaceInfo(encoderWrapper.getSurfaceInfo(width, height));
} catch (TransformationException exception) {
errorConsumer.accept(exception);
@ -166,14 +166,15 @@ import org.checkerframework.dataflow.qual.Pure;
}
@Override
public void onFrameProcessingError(FrameProcessingException exception) {
public void onError(VideoFrameProcessingException exception) {
errorConsumer.accept(
TransformationException.createForFrameProcessingException(
exception, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED));
TransformationException.createForVideoFrameProcessingException(
exception,
TransformationException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED));
}
@Override
public void onFrameProcessingEnded() {
public void onEnded() {
VideoSamplePipeline.this.finalFramePresentationTimeUs =
lastProcessedFramePresentationTimeUs;
try {
@ -183,9 +184,9 @@ import org.checkerframework.dataflow.qual.Pure;
}
}
});
} catch (FrameProcessingException e) {
throw TransformationException.createForFrameProcessingException(
e, TransformationException.ERROR_CODE_FRAME_PROCESSING_FAILED);
} catch (VideoFrameProcessingException e) {
throw TransformationException.createForVideoFrameProcessingException(
e, TransformationException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED);
}
// The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
int decodedWidth =
@ -206,43 +207,43 @@ import org.checkerframework.dataflow.qual.Pure;
@Override
public void onMediaItemChanged(
EditedMediaItem editedMediaItem, Format trackFormat, long mediaItemOffsetUs) {
frameProcessor.setInputFrameInfo(
videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build());
}
@Override
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
frameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
}
@Override
public Surface getInputSurface() {
return frameProcessor.getInputSurface();
return videoFrameProcessor.getInputSurface();
}
@Override
public ColorInfo getExpectedInputColorInfo() {
return frameProcessorInputColor;
return videoFrameProcessorInputColor;
}
@Override
public void registerVideoFrame() {
frameProcessor.registerInputFrame();
videoFrameProcessor.registerInputFrame();
}
@Override
public int getPendingVideoFrameCount() {
return frameProcessor.getPendingInputFrameCount();
return videoFrameProcessor.getPendingInputFrameCount();
}
@Override
public void signalEndOfVideoInput() {
frameProcessor.signalEndOfInput();
videoFrameProcessor.signalEndOfInput();
}
@Override
public void release() {
frameProcessor.release();
videoFrameProcessor.release();
encoderWrapper.release();
}
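For reference, the renamed listener callbacks wired up above, collected into one no-op sketch; onOutputFrameAvailable sits in an elided hunk and is assumed here:

VideoFrameProcessor.Listener listener =
    new VideoFrameProcessor.Listener() {
      @Override
      public void onOutputSizeChanged(int width, int height) {
        // Attach an output surface of the given size here.
      }

      @Override
      public void onOutputFrameAvailable(long presentationTimeUs) {
        // Assumed callback: not visible in the hunks above.
      }

      @Override
      public void onError(VideoFrameProcessingException exception) {
        // Renamed from onFrameProcessingError.
      }

      @Override
      public void onEnded() {
        // Renamed from onFrameProcessingEnded.
      }
    };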


@ -78,7 +78,7 @@ com.google.android.exoplayer2.text androidx.media3.common.text CueGroup Cue
com.google.android.exoplayer2.text com.google.android.exoplayer2.text ExoplayerCuesDecoder SubtitleDecoderFactory TextOutput TextRenderer
com.google.android.exoplayer2.upstream.crypto com.google.android.exoplayer2.upstream AesCipherDataSource AesCipherDataSink AesFlushingCipher
com.google.android.exoplayer2.util com.google.android.exoplayer2.util AtomicFile Assertions BundleableUtil BundleUtil Clock ClosedSource CodecSpecificDataUtil ColorParser ConditionVariable Consumer CopyOnWriteMultiset EGLSurfaceTexture GlProgram GlUtil HandlerWrapper LibraryLoader ListenerSet Log LongArray MediaFormatUtil NetworkTypeObserver NonNullApi NotificationUtil ParsableBitArray ParsableByteArray RepeatModeUtil RunnableFutureTask Size SystemClock SystemHandlerWrapper TimedValueQueue TimestampAdjuster TraceUtil UnknownNull UnstableApi UriUtil Util XmlPullParserUtil
com.google.android.exoplayer2.util androidx.media3.common DebugViewProvider Effect ErrorMessageProvider FlagSet FileTypes FrameInfo FrameProcessingException FrameProcessor MimeTypes PriorityTaskManager SurfaceInfo
com.google.android.exoplayer2.util androidx.media3.common DebugViewProvider Effect ErrorMessageProvider FlagSet FileTypes FrameInfo VideoFrameProcessingException VideoFrameProcessor MimeTypes PriorityTaskManager SurfaceInfo
com.google.android.exoplayer2.metadata androidx.media3.common Metadata
com.google.android.exoplayer2.metadata com.google.android.exoplayer2.metadata MetadataDecoderFactory MetadataOutput MetadataRenderer
com.google.android.exoplayer2.audio androidx.media3.common AudioAttributes AuxEffectInfo


@ -32,9 +32,9 @@ import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.FrameInfo;
import com.google.android.exoplayer2.util.FrameProcessingException;
import com.google.android.exoplayer2.util.FrameProcessor;
import com.google.android.exoplayer2.util.SurfaceInfo;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
@ -43,17 +43,17 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** A test runner for {@link FrameProcessor} tests. */
/** A test runner for {@link VideoFrameProcessor} tests. */
@RequiresApi(19)
public final class FrameProcessorTestRunner {
public final class VideoFrameProcessorTestRunner {
/** A builder for {@link FrameProcessorTestRunner} instances. */
/** A builder for {@link VideoFrameProcessorTestRunner} instances. */
public static final class Builder {
/** The ratio of width over height, for each pixel in a frame. */
private static final float DEFAULT_PIXEL_WIDTH_HEIGHT_RATIO = 1;
private @MonotonicNonNull String testId;
private FrameProcessor.@MonotonicNonNull Factory frameProcessorFactory;
private VideoFrameProcessor.@MonotonicNonNull Factory videoFrameProcessorFactory;
private @MonotonicNonNull String videoAssetPath;
private @MonotonicNonNull String outputFileLabel;
private @MonotonicNonNull ImmutableList<Effect> effects;
@ -80,13 +80,14 @@ public final class FrameProcessorTestRunner {
}
/**
* Sets the {@link FrameProcessor.Factory}.
* Sets the {@link VideoFrameProcessor.Factory}.
*
* <p>This is a required value.
*/
@CanIgnoreReturnValue
public Builder setFrameProcessorFactory(FrameProcessor.Factory frameProcessorFactory) {
this.frameProcessorFactory = frameProcessorFactory;
public Builder setVideoFrameProcessorFactory(
VideoFrameProcessor.Factory videoFrameProcessorFactory) {
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
return this;
}
@ -169,7 +170,7 @@ public final class FrameProcessorTestRunner {
return this;
}
/**
* Sets the input track type. See {@link FrameProcessor.Factory#create}.
* Sets the input track type. See {@link VideoFrameProcessor.Factory#create}.
*
* <p>The default value is {@code true}.
*/
@ -179,14 +180,14 @@ public final class FrameProcessorTestRunner {
return this;
}
public FrameProcessorTestRunner build() throws FrameProcessingException {
public VideoFrameProcessorTestRunner build() throws VideoFrameProcessingException {
checkStateNotNull(testId, "testId must be set.");
checkStateNotNull(frameProcessorFactory, "frameProcessorFactory must be set.");
checkStateNotNull(videoFrameProcessorFactory, "videoFrameProcessorFactory must be set.");
checkStateNotNull(videoAssetPath, "videoAssetPath must be set.");
return new FrameProcessorTestRunner(
return new VideoFrameProcessorTestRunner(
testId,
frameProcessorFactory,
videoFrameProcessorFactory,
videoAssetPath,
outputFileLabel == null ? "" : outputFileLabel,
effects == null ? ImmutableList.of() : effects,
@ -198,25 +199,25 @@ public final class FrameProcessorTestRunner {
}
/**
* Time to wait for the decoded frame to populate the {@link FrameProcessor} instance's input
* surface and the {@link FrameProcessor} to finish processing the frame, in milliseconds.
* Time to wait for the decoded frame to populate the {@link VideoFrameProcessor} instance's input
* surface and the {@link VideoFrameProcessor} to finish processing the frame, in milliseconds.
*/
private static final int FRAME_PROCESSING_WAIT_MS = 5000;
private static final int VIDEO_FRAME_PROCESSING_WAIT_MS = 5000;
private final String testId;
private final String videoAssetPath;
private final String outputFileLabel;
private final float pixelWidthHeightRatio;
private final AtomicReference<FrameProcessingException> frameProcessingException;
private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException;
private final FrameProcessor frameProcessor;
private final VideoFrameProcessor videoFrameProcessor;
private volatile @MonotonicNonNull ImageReader outputImageReader;
private volatile boolean frameProcessingEnded;
private volatile boolean videoFrameProcessingEnded;
private FrameProcessorTestRunner(
private VideoFrameProcessorTestRunner(
String testId,
FrameProcessor.Factory frameProcessorFactory,
VideoFrameProcessor.Factory videoFrameProcessorFactory,
String videoAssetPath,
String outputFileLabel,
ImmutableList<Effect> effects,
@ -224,15 +225,15 @@ public final class FrameProcessorTestRunner {
ColorInfo inputColorInfo,
ColorInfo outputColorInfo,
boolean isInputTextureExternal)
throws FrameProcessingException {
throws VideoFrameProcessingException {
this.testId = testId;
this.videoAssetPath = videoAssetPath;
this.outputFileLabel = outputFileLabel;
this.pixelWidthHeightRatio = pixelWidthHeightRatio;
frameProcessingException = new AtomicReference<>();
videoFrameProcessingException = new AtomicReference<>();
frameProcessor =
frameProcessorFactory.create(
videoFrameProcessor =
videoFrameProcessorFactory.create(
getApplicationContext(),
effects,
DebugViewProvider.NONE,
@ -241,13 +242,13 @@ public final class FrameProcessorTestRunner {
isInputTextureExternal,
/* releaseFramesAutomatically= */ true,
MoreExecutors.directExecutor(),
new FrameProcessor.Listener() {
new VideoFrameProcessor.Listener() {
@Override
public void onOutputSizeChanged(int width, int height) {
outputImageReader =
ImageReader.newInstance(
width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1);
checkNotNull(frameProcessor)
checkNotNull(videoFrameProcessor)
.setOutputSurfaceInfo(
new SurfaceInfo(outputImageReader.getSurface(), width, height));
}
@ -258,13 +259,13 @@ public final class FrameProcessorTestRunner {
}
@Override
public void onFrameProcessingError(FrameProcessingException exception) {
frameProcessingException.set(exception);
public void onError(VideoFrameProcessingException exception) {
videoFrameProcessingException.set(exception);
}
@Override
public void onFrameProcessingEnded() {
frameProcessingEnded = true;
public void onEnded() {
videoFrameProcessingEnded = true;
}
});
}
@ -275,13 +276,13 @@ public final class FrameProcessorTestRunner {
new DecodeOneFrameUtil.Listener() {
@Override
public void onContainerExtracted(MediaFormat mediaFormat) {
frameProcessor.setInputFrameInfo(
videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(
mediaFormat.getInteger(MediaFormat.KEY_WIDTH),
mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build());
frameProcessor.registerInputFrame();
videoFrameProcessor.registerInputFrame();
}
@Override
@ -289,36 +290,36 @@ public final class FrameProcessorTestRunner {
// Do nothing.
}
},
frameProcessor.getInputSurface());
videoFrameProcessor.getInputSurface());
return endFrameProcessingAndGetImage();
}
public Bitmap processImageFrameAndEnd(Bitmap inputBitmap) throws Exception {
frameProcessor.setInputFrameInfo(
videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(inputBitmap.getWidth(), inputBitmap.getHeight())
.setPixelWidthHeightRatio(pixelWidthHeightRatio)
.build());
frameProcessor.queueInputBitmap(inputBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
videoFrameProcessor.queueInputBitmap(inputBitmap, C.MICROS_PER_SECOND, /* frameRate= */ 1);
return endFrameProcessingAndGetImage();
}
private Bitmap endFrameProcessingAndGetImage() throws Exception {
frameProcessor.signalEndOfInput();
Thread.sleep(FRAME_PROCESSING_WAIT_MS);
videoFrameProcessor.signalEndOfInput();
Thread.sleep(VIDEO_FRAME_PROCESSING_WAIT_MS);
assertThat(frameProcessingEnded).isTrue();
assertThat(frameProcessingException.get()).isNull();
assertThat(videoFrameProcessingEnded).isTrue();
assertThat(videoFrameProcessingException.get()).isNull();
Image frameProcessorOutputImage = checkNotNull(outputImageReader).acquireLatestImage();
Bitmap actualBitmap = createArgb8888BitmapFromRgba8888Image(frameProcessorOutputImage);
frameProcessorOutputImage.close();
Image videoFrameProcessorOutputImage = checkNotNull(outputImageReader).acquireLatestImage();
Bitmap actualBitmap = createArgb8888BitmapFromRgba8888Image(videoFrameProcessorOutputImage);
videoFrameProcessorOutputImage.close();
maybeSaveTestBitmapToCacheDirectory(testId, /* bitmapLabel= */ outputFileLabel, actualBitmap);
return actualBitmap;
}
public void release() {
if (frameProcessor != null) {
frameProcessor.release();
if (videoFrameProcessor != null) {
videoFrameProcessor.release();
}
}
}
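End to end, the renamed runner reads like this sketch; the test id and asset path are placeholders, and build() throws VideoFrameProcessingException:

VideoFrameProcessorTestRunner runner =
    new VideoFrameProcessorTestRunner.Builder()
        .setTestId("rename_smoke_test")
        .setVideoFrameProcessorFactory(new DefaultVideoFrameProcessor.Factory())
        .setVideoAssetPath("media/mp4/sample.mp4") // Hypothetical asset.
        .build();
Bitmap actualBitmap = runner.processFirstFrameAndEnd();
runner.release();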