Add ByteBufferGlEffect.Image for easier format conversion

PiperOrigin-RevId: 668506831
This commit is contained in:
dancho 2024-08-28 10:19:19 -07:00 committed by Copybara-Service
parent 070e8217ac
commit 5c2dc7ed4e
3 changed files with 97 additions and 46 deletions

View File

@ -26,7 +26,6 @@ import static com.google.common.truth.Truth.assertThat;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.graphics.Color; import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Rect; import android.graphics.Rect;
import android.text.Spannable; import android.text.Spannable;
import android.text.SpannableString; import android.text.SpannableString;
@ -44,7 +43,6 @@ import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -151,28 +149,13 @@ public class ByteBufferGlEffectTest {
} }
@Override @Override
public ListenableFuture<Bitmap> processPixelBuffer( public ListenableFuture<Bitmap> processImage(
ByteBuffer pixelBuffer, long presentationTimeUs) { ByteBufferGlEffect.Image image, long presentationTimeUs) {
// TODO: b/361286064 - Add helper functions for easier conversion to Bitmap. checkState(image.width == EFFECT_INPUT_FRAME_WIDTH);
// The memory layout of pixels differs between OpenGL and Android Bitmap. checkState(image.height == EFFECT_INPUT_FRAME_HEIGHT);
// The first pixel in OpenGL is in the lower left corner, and the first checkState(
// pixel in Android Bitmap is in the top left corner. image.pixelBuffer.capacity() == EFFECT_INPUT_FRAME_WIDTH * EFFECT_INPUT_FRAME_HEIGHT * 4);
// Mirror the Bitmap's Y axis. Bitmap inputBitmap = image.copyToBitmap();
Bitmap bitmapInGlMemoryLayout =
Bitmap.createBitmap(
EFFECT_INPUT_FRAME_WIDTH, EFFECT_INPUT_FRAME_HEIGHT, Bitmap.Config.ARGB_8888);
bitmapInGlMemoryLayout.copyPixelsFromBuffer(pixelBuffer);
Matrix glToAndroidTransformation = new Matrix();
glToAndroidTransformation.setScale(/* sx= */ 1, /* sy= */ -1);
Bitmap inputBitmap =
Bitmap.createBitmap(
bitmapInGlMemoryLayout,
/* x= */ 0,
/* y= */ 0,
bitmapInGlMemoryLayout.getWidth(),
bitmapInGlMemoryLayout.getHeight(),
glToAndroidTransformation,
/* filter= */ true);
inputBitmaps.add(inputBitmap); inputBitmaps.add(inputBitmap);
return drawingService.submit( return drawingService.submit(
() -> () ->

View File

@ -113,8 +113,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
TexturePixelBuffer texturePixelBuffer = new TexturePixelBuffer(effectInputTexture); TexturePixelBuffer texturePixelBuffer = new TexturePixelBuffer(effectInputTexture);
unmappedPixelBuffers.add(texturePixelBuffer); unmappedPixelBuffers.add(texturePixelBuffer);
return Util.transformFutureAsync( return Util.transformFutureAsync(
texturePixelBuffer.byteBufferSettableFuture, texturePixelBuffer.imageSettableFuture,
(pixelBuffer) -> processor.processPixelBuffer(pixelBuffer, presentationTimeUs)); (image) -> processor.processImage(image, presentationTimeUs));
} catch (GlUtil.GlException | VideoFrameProcessingException e) { } catch (GlUtil.GlException | VideoFrameProcessingException e) {
return immediateFailedFuture(e); return immediateFailedFuture(e);
} }
@ -190,23 +190,26 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* GlTextureInfo}. * GlTextureInfo}.
*/ */
private final class TexturePixelBuffer { private final class TexturePixelBuffer {
public final int width;
public final int height;
public final PixelBufferObjectInfo pixelBufferObjectInfo; public final PixelBufferObjectInfo pixelBufferObjectInfo;
public final SettableFuture<ByteBuffer> byteBufferSettableFuture; public final SettableFuture<ByteBufferGlEffect.Image> imageSettableFuture;
private boolean mapped; private boolean mapped;
public TexturePixelBuffer(GlTextureInfo textureInfo) throws GlUtil.GlException { public TexturePixelBuffer(GlTextureInfo textureInfo) throws GlUtil.GlException {
width = textureInfo.width;
height = textureInfo.height;
int pixelBufferSize = texturePixelBufferSize(textureInfo); int pixelBufferSize = texturePixelBufferSize(textureInfo);
pixelBufferObjectInfo = pixelBufferObjectProvider.getPixelBufferObject(pixelBufferSize); pixelBufferObjectInfo = pixelBufferObjectProvider.getPixelBufferObject(pixelBufferSize);
GlUtil.schedulePixelBufferRead( GlUtil.schedulePixelBufferRead(textureInfo.fboId, width, height, pixelBufferObjectInfo.id);
textureInfo.fboId, textureInfo.width, textureInfo.height, pixelBufferObjectInfo.id); imageSettableFuture = SettableFuture.create();
byteBufferSettableFuture = SettableFuture.create();
} }
public void map() throws GlUtil.GlException { public void map() throws GlUtil.GlException {
ByteBuffer byteBuffer = ByteBuffer byteBuffer =
GlUtil.mapPixelBufferObject(pixelBufferObjectInfo.id, pixelBufferObjectInfo.size); GlUtil.mapPixelBufferObject(pixelBufferObjectInfo.id, pixelBufferObjectInfo.size);
byteBufferSettableFuture.set(byteBuffer); imageSettableFuture.set(new ByteBufferGlEffect.Image(width, height, byteBuffer));
mapped = true; mapped = true;
} }

View File

@ -18,7 +18,10 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument; import static androidx.media3.common.util.Assertions.checkArgument;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.Rect; import android.graphics.Rect;
import android.opengl.GLES20;
import androidx.media3.common.GlTextureInfo; import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.Size; import androidx.media3.common.util.Size;
@ -29,11 +32,11 @@ import java.util.concurrent.Future;
/** /**
* A {@link GlEffect} implementation that runs an asynchronous {@link Processor} on video frame data * A {@link GlEffect} implementation that runs an asynchronous {@link Processor} on video frame data
* passed in as a {@link ByteBuffer}. * passed in as a {@link ByteBufferGlEffect.Image}.
* *
* <p>This effect can be used to apply CPU-based effects. Or the provided {@link ByteBuffer} can be * <p>This effect can be used to apply CPU-based effects. Or the provided {@link
* passed to other heterogeneous compute components that are available such as another GPU context, * ByteBufferGlEffect.Image} can be passed to other heterogeneous compute components that are
* FPGAs, or NPUs. * available such as another GPU context, FPGAs, or NPUs.
*/ */
@UnstableApi @UnstableApi
/* package */ class ByteBufferGlEffect<T> implements GlEffect { /* package */ class ByteBufferGlEffect<T> implements GlEffect {
@ -41,6 +44,69 @@ import java.util.concurrent.Future;
private static final int DEFAULT_QUEUE_SIZE = 6; private static final int DEFAULT_QUEUE_SIZE = 6;
private static final int DEFAULT_PENDING_PIXEL_BUFFER_QUEUE_SIZE = 1; private static final int DEFAULT_PENDING_PIXEL_BUFFER_QUEUE_SIZE = 1;
/** A class that represents image data that is backed by a {@link ByteBuffer}. */
public static class Image {
public final int width;
public final int height;
public final ByteBuffer pixelBuffer;
/**
* Creates an instance.
*
* <p>The first pixel in the pixel buffer is the lower left corner of the image. Pixels are in
* row order from the lowest to the highest row, left to right in each row.
*
* <p>The order of pixels is the same as the output of {@link GLES20#glReadPixels}, and differs
* from the order of pixels of {@link Bitmap}.
*
* <p>For each pixel, the byte order is the same as {@link Bitmap.Config#ARGB_8888}. Each pixel
* is stored in 4 bytes. Each channel (RGB and alpha for translucency) is stored with 8 bits of
* precision. Use this formula to pack colors into 32 bits:
*
* <pre class="prettyprint">
* {@code int color = (A & 0xff) << 24 | (B & 0xff) << 16 | (G & 0xff) << 8 | (R & 0xff);}
* </pre>
*
 * <p>On a little-endian machine, pixelBuffer.get(0) is the red channel of the first pixel.
*
* @param width The width of the image.
* @param height The height of the image.
* @param pixelBuffer The pixel buffer.
*/
/* package */ Image(int width, int height, ByteBuffer pixelBuffer) {
checkArgument(pixelBuffer.capacity() == width * height * 4);
this.width = width;
this.height = height;
this.pixelBuffer = pixelBuffer;
}
/**
* Returns a {@link Bitmap} that contains a copy of the pixel buffer.
*
* <p>The returned {@link Bitmap} has config {@link Bitmap.Config#ARGB_8888}.
*
* <p>This method copies the pixel data and is less efficient than accessing the {@linkplain
* #pixelBuffer pixel buffer} directly.
*/
public Bitmap copyToBitmap() {
// The order of pixels differs between OpenGL and Android Bitmap. The first pixel in OpenGL is
// in the lower left corner, and the first pixel in Android Bitmap is in the top left corner.
// Mirror the Bitmap's Y axis to return the correct pixel order.
Bitmap bitmapInGlPixelLayout = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmapInGlPixelLayout.copyPixelsFromBuffer(pixelBuffer);
Matrix glToAndroidTransformation = new Matrix();
glToAndroidTransformation.setScale(/* sx= */ 1, /* sy= */ -1);
return Bitmap.createBitmap(
bitmapInGlPixelLayout,
/* x= */ 0,
/* y= */ 0,
bitmapInGlPixelLayout.getWidth(),
bitmapInGlPixelLayout.getHeight(),
glToAndroidTransformation,
/* filter= */ true);
}
}
/** /**
* A processor that takes in {@link ByteBuffer ByteBuffers} that represent input image data, and * A processor that takes in {@link ByteBuffer ByteBuffers} that represent input image data, and
* produces results of type {@code <T>}. * produces results of type {@code <T>}.
@ -53,21 +119,21 @@ import java.util.concurrent.Future;
/** /**
* Configures the instance and returns the dimensions of the image required by {@link * Configures the instance and returns the dimensions of the image required by {@link
* #processPixelBuffer}. * #processImage}.
* *
* <p>When the returned dimensions differ from {@code inputWidth} and {@code inputHeight}, the * <p>When the returned dimensions differ from {@code inputWidth} and {@code inputHeight}, the
* image will be scaled based on {@link #getScaledRegion}. * image will be scaled based on {@link #getScaledRegion}.
* *
* @param inputWidth The input width in pixels. * @param inputWidth The input width in pixels.
* @param inputHeight The input height in pixels. * @param inputHeight The input height in pixels.
* @return The size in pixels of the image data accepted by {@link #processPixelBuffer}. * @return The size in pixels of the image data accepted by {@link #processImage}.
* @throws VideoFrameProcessingException On error. * @throws VideoFrameProcessingException On error.
*/ */
Size configure(int inputWidth, int inputHeight) throws VideoFrameProcessingException; Size configure(int inputWidth, int inputHeight) throws VideoFrameProcessingException;
/** /**
* Selects a region of the input texture that will be scaled to fill the image given that is * Selects a region of the input texture that will be scaled to fill the image that is given to
* given to {@link #processPixelBuffer}. * {@link #processImage}.
* *
* <p>Called once per input frame. * <p>Called once per input frame.
* *
@ -82,17 +148,16 @@ import java.util.concurrent.Future;
Rect getScaledRegion(long presentationTimeUs); Rect getScaledRegion(long presentationTimeUs);
/** /**
* Processing the image data in the {@code pixelBuffer}. * Processing the image data in the {@code image}.
* *
* <p>Accessing {@code pixelBuffer} after the returned future is {@linkplain Future#isDone() * <p>Accessing {@code image} after the returned future is {@linkplain Future#isDone() done} or
* done} or {@linkplain Future#isCancelled() cancelled} can lead to undefined behaviour. * {@linkplain Future#isCancelled() cancelled} can lead to undefined behaviour.
* *
* @param pixelBuffer The image data. * @param image The image data.
* @param presentationTimeUs The presentation time in microseconds. * @param presentationTimeUs The presentation time in microseconds.
* @return A {@link ListenableFuture} of the result. * @return A {@link ListenableFuture} of the result.
*/ */
// TODO: b/361286064 - Add helper functions for easier conversion to Bitmap. ListenableFuture<T> processImage(Image image, long presentationTimeUs);
ListenableFuture<T> processPixelBuffer(ByteBuffer pixelBuffer, long presentationTimeUs);
/** /**
* Finishes processing the frame at {@code presentationTimeUs}. Use this method to perform * Finishes processing the frame at {@code presentationTimeUs}. Use this method to perform
@ -103,7 +168,7 @@ import java.util.concurrent.Future;
* *
* @param outputFrame The texture info of the frame. * @param outputFrame The texture info of the frame.
* @param presentationTimeUs The presentation timestamp of the frame, in microseconds. * @param presentationTimeUs The presentation timestamp of the frame, in microseconds.
* @param result The result of the asynchronous computation in {@link #processPixelBuffer}. * @param result The result of the asynchronous computation in {@link #processImage}.
*/ */
void finishProcessingAndBlend(GlTextureInfo outputFrame, long presentationTimeUs, T result) void finishProcessingAndBlend(GlTextureInfo outputFrame, long presentationTimeUs, T result)
throws VideoFrameProcessingException; throws VideoFrameProcessingException;