Add ByteBufferGlEffect.Image for easier format conversion
PiperOrigin-RevId: 668506831
parent 070e8217ac
commit 5c2dc7ed4e
@@ -26,7 +26,6 @@ import static com.google.common.truth.Truth.assertThat;
 
 import android.graphics.Bitmap;
 import android.graphics.Color;
-import android.graphics.Matrix;
 import android.graphics.Rect;
 import android.text.Spannable;
 import android.text.SpannableString;
@@ -44,7 +43,6 @@ import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@@ -151,28 +149,13 @@ public class ByteBufferGlEffectTest {
     }
 
     @Override
-    public ListenableFuture<Bitmap> processPixelBuffer(
-        ByteBuffer pixelBuffer, long presentationTimeUs) {
-      // TODO: b/361286064 - Add helper functions for easier conversion to Bitmap.
-      // The memory layout of pixels differs between OpenGL and Android Bitmap.
-      // The first pixel in OpenGL is in the lower left corner, and the first
-      // pixel in Android Bitmap is in the top left corner.
-      // Mirror the Bitmap's Y axis.
-      Bitmap bitmapInGlMemoryLayout =
-          Bitmap.createBitmap(
-              EFFECT_INPUT_FRAME_WIDTH, EFFECT_INPUT_FRAME_HEIGHT, Bitmap.Config.ARGB_8888);
-      bitmapInGlMemoryLayout.copyPixelsFromBuffer(pixelBuffer);
-      Matrix glToAndroidTransformation = new Matrix();
-      glToAndroidTransformation.setScale(/* sx= */ 1, /* sy= */ -1);
-      Bitmap inputBitmap =
-          Bitmap.createBitmap(
-              bitmapInGlMemoryLayout,
-              /* x= */ 0,
-              /* y= */ 0,
-              bitmapInGlMemoryLayout.getWidth(),
-              bitmapInGlMemoryLayout.getHeight(),
-              glToAndroidTransformation,
-              /* filter= */ true);
+    public ListenableFuture<Bitmap> processImage(
+        ByteBufferGlEffect.Image image, long presentationTimeUs) {
+      checkState(image.width == EFFECT_INPUT_FRAME_WIDTH);
+      checkState(image.height == EFFECT_INPUT_FRAME_HEIGHT);
+      checkState(
+          image.pixelBuffer.capacity() == EFFECT_INPUT_FRAME_WIDTH * EFFECT_INPUT_FRAME_HEIGHT * 4);
+      Bitmap inputBitmap = image.copyToBitmap();
       inputBitmaps.add(inputBitmap);
       return drawingService.submit(
           () ->
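For reference, the new Image.copyToBitmap() call encapsulates the manual Y-flip that the removed lines above performed. A minimal standalone sketch of that conversion, using android.graphics.Bitmap and android.graphics.Matrix (the helper name is ours, not part of the API):

// Sketch only: the GL-layout-to-Bitmap conversion that the removed processPixelBuffer()
// code performed inline, and that ByteBufferGlEffect.Image.copyToBitmap() now wraps.
private static Bitmap glPixelBufferToBitmap(ByteBuffer pixelBuffer, int width, int height) {
  // Copy the raw RGBA bytes into a Bitmap; rows are still in OpenGL (bottom-up) order.
  Bitmap glLayout = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
  glLayout.copyPixelsFromBuffer(pixelBuffer);
  // OpenGL's first pixel is the bottom-left corner, Bitmap's is the top-left: mirror the Y axis.
  Matrix flipY = new Matrix();
  flipY.setScale(/* sx= */ 1, /* sy= */ -1);
  return Bitmap.createBitmap(
      glLayout, /* x= */ 0, /* y= */ 0, width, height, flipY, /* filter= */ true);
}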
@@ -113,8 +113,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       TexturePixelBuffer texturePixelBuffer = new TexturePixelBuffer(effectInputTexture);
       unmappedPixelBuffers.add(texturePixelBuffer);
       return Util.transformFutureAsync(
-          texturePixelBuffer.byteBufferSettableFuture,
-          (pixelBuffer) -> processor.processPixelBuffer(pixelBuffer, presentationTimeUs));
+          texturePixelBuffer.imageSettableFuture,
+          (image) -> processor.processImage(image, presentationTimeUs));
     } catch (GlUtil.GlException | VideoFrameProcessingException e) {
       return immediateFailedFuture(e);
     }
@@ -190,23 +190,26 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    * GlTextureInfo}.
    */
   private final class TexturePixelBuffer {
+    public final int width;
+    public final int height;
     public final PixelBufferObjectInfo pixelBufferObjectInfo;
-    public final SettableFuture<ByteBuffer> byteBufferSettableFuture;
+    public final SettableFuture<ByteBufferGlEffect.Image> imageSettableFuture;
 
     private boolean mapped;
 
     public TexturePixelBuffer(GlTextureInfo textureInfo) throws GlUtil.GlException {
+      width = textureInfo.width;
+      height = textureInfo.height;
       int pixelBufferSize = texturePixelBufferSize(textureInfo);
       pixelBufferObjectInfo = pixelBufferObjectProvider.getPixelBufferObject(pixelBufferSize);
-      GlUtil.schedulePixelBufferRead(
-          textureInfo.fboId, textureInfo.width, textureInfo.height, pixelBufferObjectInfo.id);
-      byteBufferSettableFuture = SettableFuture.create();
+      GlUtil.schedulePixelBufferRead(textureInfo.fboId, width, height, pixelBufferObjectInfo.id);
+      imageSettableFuture = SettableFuture.create();
     }
 
     public void map() throws GlUtil.GlException {
       ByteBuffer byteBuffer =
           GlUtil.mapPixelBufferObject(pixelBufferObjectInfo.id, pixelBufferObjectInfo.size);
-      byteBufferSettableFuture.set(byteBuffer);
+      imageSettableFuture.set(new ByteBufferGlEffect.Image(width, height, byteBuffer));
       mapped = true;
     }
 
@@ -18,7 +18,10 @@ package androidx.media3.effect;
 import static androidx.media3.common.util.Assertions.checkArgument;
 
 import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.Matrix;
 import android.graphics.Rect;
+import android.opengl.GLES20;
 import androidx.media3.common.GlTextureInfo;
 import androidx.media3.common.VideoFrameProcessingException;
 import androidx.media3.common.util.Size;
@@ -29,11 +32,11 @@ import java.util.concurrent.Future;
 
 /**
  * A {@link GlEffect} implementation that runs an asynchronous {@link Processor} on video frame data
- * passed in as a {@link ByteBuffer}.
+ * passed in as a {@link ByteBufferGlEffect.Image}.
  *
- * <p>This effect can be used to apply CPU-based effects. Or the provided {@link ByteBuffer} can be
- * passed to other heterogeneous compute components that are available such as another GPU context,
- * FPGAs, or NPUs.
+ * <p>This effect can be used to apply CPU-based effects. Or the provided {@link
+ * ByteBufferGlEffect.Image} can be passed to other heterogeneous compute components that are
+ * available such as another GPU context, FPGAs, or NPUs.
  */
 @UnstableApi
 /* package */ class ByteBufferGlEffect<T> implements GlEffect {
@@ -41,6 +44,69 @@ import java.util.concurrent.Future;
   private static final int DEFAULT_QUEUE_SIZE = 6;
   private static final int DEFAULT_PENDING_PIXEL_BUFFER_QUEUE_SIZE = 1;
 
+  /** A class that represents image data that is backed by a {@link ByteBuffer}. */
+  public static class Image {
+    public final int width;
+    public final int height;
+    public final ByteBuffer pixelBuffer;
+
+    /**
+     * Creates an instance.
+     *
+     * <p>The first pixel in the pixel buffer is the lower left corner of the image. Pixels are in
+     * row order from the lowest to the highest row, left to right in each row.
+     *
+     * <p>The order of pixels is the same as the output of {@link GLES20#glReadPixels}, and differs
+     * from the order of pixels of {@link Bitmap}.
+     *
+     * <p>For each pixel, the byte order is the same as {@link Bitmap.Config#ARGB_8888}. Each pixel
+     * is stored in 4 bytes. Each channel (RGB and alpha for translucency) is stored with 8 bits of
+     * precision. Use this formula to pack colors into 32 bits:
+     *
+     * <pre class="prettyprint">
+     * {@code int color = (A & 0xff) << 24 | (B & 0xff) << 16 | (G & 0xff) << 8 | (R & 0xff);}
+     * </pre>
+     *
+     * <p>On a little-endian machine, pixelBuffer.get(0) is the red channel of the first pixel.
+     *
+     * @param width The width of the image.
+     * @param height The height of the image.
+     * @param pixelBuffer The pixel buffer.
+     */
+    /* package */ Image(int width, int height, ByteBuffer pixelBuffer) {
+      checkArgument(pixelBuffer.capacity() == width * height * 4);
+      this.width = width;
+      this.height = height;
+      this.pixelBuffer = pixelBuffer;
+    }
+
+    /**
+     * Returns a {@link Bitmap} that contains a copy of the pixel buffer.
+     *
+     * <p>The returned {@link Bitmap} has config {@link Bitmap.Config#ARGB_8888}.
+     *
+     * <p>This method copies the pixel data and is less efficient than accessing the {@linkplain
+     * #pixelBuffer pixel buffer} directly.
+     */
+    public Bitmap copyToBitmap() {
+      // The order of pixels differs between OpenGL and Android Bitmap. The first pixel in OpenGL is
+      // in the lower left corner, and the first pixel in Android Bitmap is in the top left corner.
+      // Mirror the Bitmap's Y axis to return the correct pixel order.
+      Bitmap bitmapInGlPixelLayout = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+      bitmapInGlPixelLayout.copyPixelsFromBuffer(pixelBuffer);
+      Matrix glToAndroidTransformation = new Matrix();
+      glToAndroidTransformation.setScale(/* sx= */ 1, /* sy= */ -1);
+      return Bitmap.createBitmap(
+          bitmapInGlPixelLayout,
+          /* x= */ 0,
+          /* y= */ 0,
+          bitmapInGlPixelLayout.getWidth(),
+          bitmapInGlPixelLayout.getHeight(),
+          glToAndroidTransformation,
+          /* filter= */ true);
+    }
+  }
+
   /**
    * A processor that takes in {@link ByteBuffer ByteBuffers} that represent input image data, and
    * produces results of type {@code <T>}.
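As a worked example of the pixel layout documented above: to read the pixel at Bitmap-style coordinates (x, y), flip the row index and read four bytes in R, G, B, A order. The helper below is only an illustration, not part of the API (it uses android.graphics.Color):

// Illustration only: reads one pixel at Bitmap-style coordinates (origin in the top-left
// corner) directly from an Image's pixel buffer, without copying the whole frame.
static int getArgbColorAt(ByteBufferGlEffect.Image image, int x, int y) {
  int glRow = image.height - 1 - y; // Buffer row 0 is the bottom row of the image.
  int offset = (glRow * image.width + x) * 4; // 4 bytes per pixel, in R, G, B, A order.
  int r = image.pixelBuffer.get(offset) & 0xFF;
  int g = image.pixelBuffer.get(offset + 1) & 0xFF;
  int b = image.pixelBuffer.get(offset + 2) & 0xFF;
  int a = image.pixelBuffer.get(offset + 3) & 0xFF;
  // For opaque pixels this equals image.copyToBitmap().getPixel(x, y).
  return Color.argb(a, r, g, b);
}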
@@ -53,21 +119,21 @@ import java.util.concurrent.Future;
 
     /**
      * Configures the instance and returns the dimensions of the image required by {@link
-     * #processPixelBuffer}.
+     * #processImage}.
      *
      * <p>When the returned dimensions differ from {@code inputWidth} and {@code inputHeight}, the
      * image will be scaled based on {@link #getScaledRegion}.
      *
      * @param inputWidth The input width in pixels.
      * @param inputHeight The input height in pixels.
-     * @return The size in pixels of the image data accepted by {@link #processPixelBuffer}.
+     * @return The size in pixels of the image data accepted by {@link #processImage}.
      * @throws VideoFrameProcessingException On error.
      */
     Size configure(int inputWidth, int inputHeight) throws VideoFrameProcessingException;
 
     /**
-     * Selects a region of the input texture that will be scaled to fill the image given that is
-     * given to {@link #processPixelBuffer}.
+     * Selects a region of the input texture that will be scaled to fill the image that is given to
+     * {@link #processImage}.
      *
      * <p>Called once per input frame.
      *
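A sketch of how configure() and getScaledRegion() interact, for a hypothetical processor that analyses a centered square of every frame at a fixed size; the 256x256 value and the field names are arbitrary, and these methods would sit inside a Processor implementation like the one sketched after the last hunk:

// Sketch only: configure() fixes the size of the Image handed to processImage(), and
// getScaledRegion() picks the part of the input frame that gets scaled into that Image.
private static final int ANALYSIS_SIZE = 256;
private int inputWidth;
private int inputHeight;

@Override
public Size configure(int inputWidth, int inputHeight) {
  this.inputWidth = inputWidth;
  this.inputHeight = inputHeight;
  // processImage() will always receive a 256x256 Image, whatever the input resolution is.
  return new Size(ANALYSIS_SIZE, ANALYSIS_SIZE);
}

@Override
public Rect getScaledRegion(long presentationTimeUs) {
  // Select the largest centered square; it is scaled to fill the 256x256 image.
  int side = Math.min(inputWidth, inputHeight);
  int left = (inputWidth - side) / 2;
  int top = (inputHeight - side) / 2;
  return new Rect(left, top, left + side, top + side);
}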
@@ -82,17 +148,16 @@ import java.util.concurrent.Future;
     Rect getScaledRegion(long presentationTimeUs);
 
     /**
-     * Processing the image data in the {@code pixelBuffer}.
+     * Processing the image data in the {@code image}.
      *
-     * <p>Accessing {@code pixelBuffer} after the returned future is {@linkplain Future#isDone()
-     * done} or {@linkplain Future#isCancelled() cancelled} can lead to undefined behaviour.
+     * <p>Accessing {@code image} after the returned future is {@linkplain Future#isDone() done} or
+     * {@linkplain Future#isCancelled() cancelled} can lead to undefined behaviour.
      *
-     * @param pixelBuffer The image data.
+     * @param image The image data.
      * @param presentationTimeUs The presentation time in microseconds.
      * @return A {@link ListenableFuture} of the result.
      */
-    // TODO: b/361286064 - Add helper functions for easier conversion to Bitmap.
-    ListenableFuture<T> processPixelBuffer(ByteBuffer pixelBuffer, long presentationTimeUs);
+    ListenableFuture<T> processImage(Image image, long presentationTimeUs);
 
     /**
      * Finishes processing the frame at {@code presentationTimeUs}. Use this method to perform
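Because the image must not be accessed once the returned future is done, a processor that does its work on another thread should copy what it needs before handing off. A sketch under that assumption, where backgroundExecutor and estimateSharpness are placeholders rather than part of the API:

// Assumed elsewhere in the processor, for this sketch only:
//   private final ListeningExecutorService backgroundExecutor = ...;
//   private static float estimateSharpness(Bitmap bitmap) { ... }
@Override
public ListenableFuture<Float> processImage(
    ByteBufferGlEffect.Image image, long presentationTimeUs) {
  // Copy the frame while the image is still valid; its buffer may be unmapped or reused
  // as soon as the future returned here is done or cancelled.
  Bitmap copy = image.copyToBitmap();
  return backgroundExecutor.submit(() -> estimateSharpness(copy)); // Captures only the copy.
}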
@@ -103,7 +168,7 @@ import java.util.concurrent.Future;
      *
      * @param outputFrame The texture info of the frame.
      * @param presentationTimeUs The presentation timestamp of the frame, in microseconds.
-     * @param result The result of the asynchronous computation in {@link #processPixelBuffer}.
+     * @param result The result of the asynchronous computation in {@link #processImage}.
      */
     void finishProcessingAndBlend(GlTextureInfo outputFrame, long presentationTimeUs, T result)
         throws VideoFrameProcessingException;
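Putting the pieces together, a rough sketch of a complete Processor implementation: the class name and the luma computation are illustrative, and it assumes the nested Processor interface is generic in its result type and that the implementation lives in the androidx.media3.effect package, since ByteBufferGlEffect is package-private.

package androidx.media3.effect;

import android.graphics.Rect;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.util.Size;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;

/** Sketch only: computes the average luma of each frame on the CPU and blends nothing back. */
/* package */ final class AverageLumaProcessor implements ByteBufferGlEffect.Processor<Double> {

  private int width;
  private int height;

  @Override
  public Size configure(int inputWidth, int inputHeight) {
    // Process frames at their input resolution.
    width = inputWidth;
    height = inputHeight;
    return new Size(inputWidth, inputHeight);
  }

  @Override
  public Rect getScaledRegion(long presentationTimeUs) {
    // Use the whole frame.
    return new Rect(/* left= */ 0, /* top= */ 0, /* right= */ width, /* bottom= */ height);
  }

  @Override
  public ListenableFuture<Double> processImage(
      ByteBufferGlEffect.Image image, long presentationTimeUs) {
    // Done inline for brevity; a real processor would hand the work to an executor.
    double sum = 0;
    for (int i = 0; i < image.pixelBuffer.capacity(); i += 4) {
      int r = image.pixelBuffer.get(i) & 0xFF;
      int g = image.pixelBuffer.get(i + 1) & 0xFF;
      int b = image.pixelBuffer.get(i + 2) & 0xFF;
      sum += 0.2126 * r + 0.7152 * g + 0.0722 * b;
    }
    return Futures.immediateFuture(sum / (image.width * image.height));
  }

  @Override
  public void finishProcessingAndBlend(
      GlTextureInfo outputFrame, long presentationTimeUs, Double result) {
    // Leaves the output frame untouched; an implementation could draw an overlay onto
    // outputFrame here based on the result.
  }
}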