Effect: Create basic VideoCompositor.
Allow two DefaultVideoFrameProcessor instances to output to one VideoCompositor, which then outputs a frame.

PiperOrigin-RevId: 544705821

parent 854c5254ec
commit 9945033867
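For orientation, the wiring this commit enables is sketched below. It is condensed from the VideoCompositorPixelTest added in this change; names like context, sharedEglContext, and factory are illustrative, and error handling is omitted.

// Shared GlObjectsProvider so each DefaultVideoFrameProcessor and the VideoCompositor
// work against the same GL context.
GlObjectsProvider sharedGlObjectsProvider = new DefaultGlObjectsProvider(sharedEglContext);

// The compositor passes each composited output texture to this listener.
VideoCompositor videoCompositor =
    new VideoCompositor(
        context,
        sharedGlObjectsProvider,
        /* textureOutputListener= */ (outputTexture, presentationTimeUs, releaseCallback, syncObject) -> {
          GlUtil.awaitSyncObject(syncObject);
          // ... read or display the composited frame ...
          releaseCallback.release(presentationTimeUs);
        },
        /* textureOutputCapacity= */ 1);

// Each input DefaultVideoFrameProcessor registers one input source and forwards its output
// textures to the compositor. Repeat this block for the second input source.
int inputId = videoCompositor.registerInputSource();
VideoFrameProcessor.Factory factory =
    new DefaultVideoFrameProcessor.Factory.Builder()
        .setGlObjectsProvider(sharedGlObjectsProvider)
        .setTextureOutput(
            (outputTexture, presentationTimeUs, releaseCallback, syncObject) -> {
              GlUtil.awaitSyncObject(syncObject);
              videoCompositor.queueInputTexture(
                  inputId, outputTexture, presentationTimeUs, releaseCallback);
            },
            /* textureOutputCapacity= */ 1)
        .build();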
shaders/fragment_shader_compositor_es2.glsl (new file):

@@ -0,0 +1,32 @@
#version 100
// Copyright 2023 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Basic ES2 compositor shader that samples from two (non-external) textures
// with uTexSampler1 and uTexSampler2, copying each with alpha = .5 to the
// output.
// TODO: b/262694346 - Allow alpha to be customized for each input.
// TODO: b/262694346 - Allow for an arbitrary amount of inputs.

precision mediump float;
uniform sampler2D uTexSampler1;
uniform sampler2D uTexSampler2;
varying vec2 vTexSamplingCoord;

void main() {
  vec4 inputColor1 = texture2D(uTexSampler1, vTexSamplingCoord);
  vec4 inputColor2 = texture2D(uTexSampler2, vTexSamplingCoord);
  gl_FragColor = vec4(inputColor1.rgb * 0.5 + inputColor2.rgb * 0.5, 1.0);
  gl_FragColor.a = 1.0;
}
VideoCompositor.java (new file):

@@ -0,0 +1,216 @@
/*
 * Copyright 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.effect;

import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;

import android.content.Context;
import android.opengl.GLES20;
import androidx.annotation.IntRange;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.UnstableApi;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

/**
 * A basic VideoCompositor that takes in frames from exactly 2 input sources and combines them into
 * one output. Only tested for 2 frames in, 1 frame out for now.
 */
@UnstableApi
public final class VideoCompositor {
  // TODO: b/262694346 - Flesh out this implementation by doing the following:
  // * Create and share a VideoFrameProcessingTaskExecutor with VideoFrameProcessor instances.
  // * Support >1 input/output frame per source.
  // * Handle matched timestamps.
  // * Handle mismatched timestamps.
  // * Before allowing customization of this class, add an interface, and rename this class to
  //   DefaultCompositor.

  private static final String VERTEX_SHADER_PATH = "shaders/vertex_shader_transformation_es2.glsl";

  private static final String FRAGMENT_SHADER_PATH = "shaders/fragment_shader_compositor_es2.glsl";

  private final Context context;
  private final DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener;
  private final GlObjectsProvider glObjectsProvider;
  // List of queues of unprocessed frames for each input source.
  private final List<Queue<InputFrameInfo>> inputFrameInfos;

  private final TexturePool outputTexturePool;
  // Only used on the GL Thread.
  private @MonotonicNonNull GlProgram glProgram;
  private long syncObject;

  public VideoCompositor(
      Context context,
      GlObjectsProvider glObjectsProvider,
      DefaultVideoFrameProcessor.TextureOutputListener textureOutputListener,
      @IntRange(from = 1) int textureOutputCapacity) {
    this.context = context;
    this.textureOutputListener = textureOutputListener;
    this.glObjectsProvider = glObjectsProvider;

    inputFrameInfos = new ArrayList<>();
    outputTexturePool =
        new TexturePool(/* useHighPrecisionColorComponents= */ false, textureOutputCapacity);
  }

  /**
   * Registers a new input source, and returns a unique {@code inputId} corresponding to this
   * source, to be used in {@link #queueInputTexture}.
   */
  public synchronized int registerInputSource() {
    inputFrameInfos.add(new ArrayDeque<>());
    return inputFrameInfos.size() - 1;
  }

  // Below methods must be called on the GL thread.

  /**
   * Queues an input texture to be composited, for example from an upstream {@link
   * DefaultVideoFrameProcessor.TextureOutputListener}.
   *
   * <p>Each input source must have a unique {@code inputId} returned from {@link
   * #registerInputSource}.
   */
  public void queueInputTexture(
      int inputId,
      GlTextureInfo inputTexture,
      long presentationTimeUs,
      DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseTextureCallback)
      throws VideoFrameProcessingException {
    InputFrameInfo inputFrameInfo =
        new InputFrameInfo(inputTexture, presentationTimeUs, releaseTextureCallback);
    checkNotNull(inputFrameInfos.get(inputId)).add(inputFrameInfo);

    if (isReadyToComposite()) {
      compositeToOutputTexture();
    }
  }

  private boolean isReadyToComposite() {
    // TODO: b/262694346 - Use timestamps to determine when to composite instead of number of
    // frames.
    for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
      if (checkNotNull(inputFrameInfos.get(inputId)).isEmpty()) {
        return false;
      }
    }
    return true;
  }

  private void compositeToOutputTexture() throws VideoFrameProcessingException {
    List<InputFrameInfo> framesToComposite = new ArrayList<>();
    for (int inputId = 0; inputId < inputFrameInfos.size(); inputId++) {
      framesToComposite.add(checkNotNull(inputFrameInfos.get(inputId)).remove());
    }

    ensureGlProgramConfigured();
    // TODO: b/262694346 -
    //  * Support an arbitrary number of inputs.
    //  * Allow different frame dimensions.
    InputFrameInfo inputFrame1 = framesToComposite.get(0);
    InputFrameInfo inputFrame2 = framesToComposite.get(1);
    checkState(inputFrame1.texture.getWidth() == inputFrame2.texture.getWidth());
    checkState(inputFrame1.texture.getHeight() == inputFrame2.texture.getHeight());
    try {
      outputTexturePool.ensureConfigured(
          glObjectsProvider, inputFrame1.texture.getWidth(), inputFrame1.texture.getHeight());
      GlTextureInfo outputTexture = outputTexturePool.useTexture();

      drawFrame(inputFrame1.texture, inputFrame2.texture, outputTexture);
      syncObject = GlUtil.createGlSyncFence();

      for (int i = 0; i < framesToComposite.size(); i++) {
        InputFrameInfo inputFrameInfo = framesToComposite.get(i);
        inputFrameInfo.releaseCallback.release(inputFrameInfo.presentationTimeUs);
      }

      // TODO: b/262694346 - Use presentationTimeUs here for freeing textures.
      textureOutputListener.onTextureRendered(
          checkNotNull(outputTexture),
          /* presentationTimeUs= */ 0,
          (presentationTimeUs) -> outputTexturePool.freeTexture(),
          syncObject);
    } catch (GlUtil.GlException e) {
      throw VideoFrameProcessingException.from(e);
    }
  }

  private void ensureGlProgramConfigured() throws VideoFrameProcessingException {
    if (glProgram != null) {
      return;
    }
    try {
      glProgram = new GlProgram(context, VERTEX_SHADER_PATH, FRAGMENT_SHADER_PATH);
      glProgram.setBufferAttribute(
          "aFramePosition",
          GlUtil.getNormalizedCoordinateBounds(),
          GlUtil.HOMOGENEOUS_COORDINATE_VECTOR_SIZE);
    } catch (GlUtil.GlException | IOException e) {
      throw new VideoFrameProcessingException(e);
    }
  }

  private void drawFrame(
      GlTextureInfo inputTexture1, GlTextureInfo inputTexture2, GlTextureInfo outputTexture)
      throws GlUtil.GlException {
    GlUtil.focusFramebufferUsingCurrentContext(
        outputTexture.getFboId(), outputTexture.getWidth(), outputTexture.getHeight());
    GlUtil.clearFocusedBuffers();

    GlProgram glProgram = checkNotNull(this.glProgram);
    glProgram.use();
    glProgram.setSamplerTexIdUniform(
        "uTexSampler1", inputTexture1.getTexId(), /* texUnitIndex= */ 0);
    glProgram.setSamplerTexIdUniform(
        "uTexSampler2", inputTexture2.getTexId(), /* texUnitIndex= */ 1);

    glProgram.setFloatsUniform("uTexTransformationMatrix", GlUtil.create4x4IdentityMatrix());
    glProgram.setFloatsUniform("uTransformationMatrix", GlUtil.create4x4IdentityMatrix());
    glProgram.setBufferAttribute(
        "aFramePosition",
        GlUtil.getNormalizedCoordinateBounds(),
        GlUtil.HOMOGENEOUS_COORDINATE_VECTOR_SIZE);
    glProgram.bindAttributesAndUniforms();
    // The four-vertex triangle strip forms a quad.
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
  }

  /** Holds information on a frame and how to release it. */
  private static final class InputFrameInfo {
    public final GlTextureInfo texture;
    public final long presentationTimeUs;
    public final DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseCallback;

    public InputFrameInfo(
        GlTextureInfo texture,
        long presentationTimeUs,
        DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseCallback) {
      this.texture = texture;
      this.presentationTimeUs = presentationTimeUs;
      this.releaseCallback = releaseCallback;
    }
  }
}
Three new binary PNG test assets (not shown): 12 KiB, 7.1 KiB, and 7.4 KiB.
TextureBitmapReader.java:

@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package androidx.media3.transformer.mh;
+package androidx.media3.transformer;
 
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;
@@ -69,10 +69,7 @@ public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.
     return outputTimestampsToBitmaps.keySet();
   }
 
-  public void readBitmapFromTexture(
-      GlTextureInfo outputTexture,
-      long presentationTimeUs,
-      DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseOutputTextureCallback)
+  public void readBitmap(GlTextureInfo outputTexture, long presentationTimeUs)
       throws VideoFrameProcessingException {
     try {
       GlUtil.focusFramebufferUsingCurrentContext(
@@ -80,10 +77,18 @@ public final class TextureBitmapReader implements VideoFrameProcessorTestRunner.
       outputBitmap =
           createBitmapFromCurrentGlFrameBuffer(
               outputTexture.getWidth(), outputTexture.getHeight(), useHighPrecisionColorComponents);
+      outputTimestampsToBitmaps.put(presentationTimeUs, outputBitmap);
     } catch (GlUtil.GlException e) {
       throw new VideoFrameProcessingException(e);
     }
-    outputTimestampsToBitmaps.put(presentationTimeUs, outputBitmap);
+  }
+
+  public void readBitmapAndReleaseTexture(
+      GlTextureInfo outputTexture,
+      long presentationTimeUs,
+      DefaultVideoFrameProcessor.ReleaseOutputTextureCallback releaseOutputTextureCallback)
+      throws VideoFrameProcessingException, GlUtil.GlException {
+    readBitmap(outputTexture, presentationTimeUs);
     releaseOutputTextureCallback.release(presentationTimeUs);
   }
VideoCompositorPixelTest.java (new file):

@@ -0,0 +1,200 @@
/*
 * Copyright 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.transformer;

import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE;
import static androidx.media3.test.utils.BitmapPixelTestUtil.maybeSaveTestBitmap;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;

import android.graphics.Bitmap;
import android.opengl.EGLContext;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Effect;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.GlUtil;
import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.RgbFilter;
import androidx.media3.effect.ScaleAndRotateTransformation;
import androidx.media3.effect.VideoCompositor;
import androidx.media3.test.utils.BitmapPixelTestUtil;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;

/** Pixel test for {@link VideoCompositor} compositing 2 input frames into 1 output frame. */
@RunWith(AndroidJUnit4.class)
public final class VideoCompositorPixelTest {
  private @MonotonicNonNull VideoFrameProcessorTestRunner inputVfpTestRunner1;
  private @MonotonicNonNull VideoFrameProcessorTestRunner inputVfpTestRunner2;

  private static final String ORIGINAL_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png";
  private static final String GRAYSCALE_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/grayscale_media3test.png";
  private static final String ROTATE180_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/rotate180_media3test.png";
  private static final String GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH =
      "media/bitmap/sample_mp4_first_frame/electrical_colors/grayscaleAndRotate180Composite.png";

  private static final Effect ROTATE_180 =
      new ScaleAndRotateTransformation.Builder().setRotationDegrees(180).build();
  private static final Effect GRAYSCALE = RgbFilter.createGrayscaleFilter();

  // TODO: b/262694346 - Create and share a VideoFrameProcessingTaskExecutor for all
  //  DefaultVideoFrameProcessor and VideoCompositor instances.

  @After
  public void release() {
    if (inputVfpTestRunner1 != null) {
      inputVfpTestRunner1.release();
    }
    if (inputVfpTestRunner2 != null) {
      inputVfpTestRunner2.release();
    }
  }

  @Test
  public void compositeTwoFrames_matchesExpected() throws Exception {
    String testId = "compositeTwoFrames_matchesExpected";

    // Arrange VideoCompositor and VideoFrameProcessor instances.
    EGLContext sharedEglContext = AndroidTestUtil.createOpenGlObjects();
    GlObjectsProvider sharedGlObjectsProvider = new DefaultGlObjectsProvider(sharedEglContext);
    AtomicReference<Bitmap> compositedOutputBitmap = new AtomicReference<>();
    VideoCompositor videoCompositor =
        new VideoCompositor(
            getApplicationContext(),
            sharedGlObjectsProvider,
            /* textureOutputListener= */ (outputTexture,
                presentationTimeUs,
                releaseOutputTextureCallback,
                syncObject) -> {
              try {
                GlUtil.awaitSyncObject(syncObject);
                compositedOutputBitmap.set(
                    BitmapPixelTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(
                        outputTexture.getWidth(), outputTexture.getHeight()));
              } catch (GlUtil.GlException e) {
                throw VideoFrameProcessingException.from(e);
              } finally {
                releaseOutputTextureCallback.release(presentationTimeUs);
              }
            },
            /* textureOutputCapacity= */ 1);
    TextureBitmapReader inputTextureBitmapReader1 = new TextureBitmapReader();
    VideoFrameProcessorTestRunner inputVfpTestRunner1 =
        getFrameProcessorTestRunnerBuilder(
                testId, inputTextureBitmapReader1, videoCompositor, sharedGlObjectsProvider)
            .setEffects(GRAYSCALE)
            .build();
    this.inputVfpTestRunner1 = inputVfpTestRunner1;
    TextureBitmapReader inputTextureBitmapReader2 = new TextureBitmapReader();
    VideoFrameProcessorTestRunner inputVfpTestRunner2 =
        getFrameProcessorTestRunnerBuilder(
                testId, inputTextureBitmapReader2, videoCompositor, sharedGlObjectsProvider)
            .setEffects(ROTATE_180)
            .build();
    this.inputVfpTestRunner2 = inputVfpTestRunner2;

    // Queue 1 input bitmap from each input VideoFrameProcessor source.
    inputVfpTestRunner1.queueInputBitmap(
        readBitmap(ORIGINAL_PNG_ASSET_PATH),
        /* durationUs= */ 1 * C.MICROS_PER_SECOND,
        /* offsetToAddUs= */ 0,
        /* frameRate= */ 1);
    inputVfpTestRunner1.endFrameProcessing();
    inputVfpTestRunner2.queueInputBitmap(
        readBitmap(ORIGINAL_PNG_ASSET_PATH),
        /* durationUs= */ 1 * C.MICROS_PER_SECOND,
        /* offsetToAddUs= */ 0,
        /* frameRate= */ 1);
    inputVfpTestRunner2.endFrameProcessing();

    // Check that VideoFrameProcessor and VideoCompositor outputs match expected bitmaps.
    Bitmap actualCompositorInputBitmap1 = checkNotNull(inputTextureBitmapReader1).getBitmap();
    saveAndAssertBitmapMatchesExpected(
        testId,
        actualCompositorInputBitmap1,
        /* actualBitmapLabel= */ "actualCompositorInputBitmap1",
        GRAYSCALE_PNG_ASSET_PATH);
    Bitmap actualCompositorInputBitmap2 = checkNotNull(inputTextureBitmapReader2).getBitmap();
    saveAndAssertBitmapMatchesExpected(
        testId,
        actualCompositorInputBitmap2,
        /* actualBitmapLabel= */ "actualCompositorInputBitmap2",
        ROTATE180_PNG_ASSET_PATH);
    Bitmap compositorOutputBitmap = compositedOutputBitmap.get();
    saveAndAssertBitmapMatchesExpected(
        testId,
        compositorOutputBitmap,
        /* actualBitmapLabel= */ "compositorOutputBitmap",
        GRAYSCALE_AND_ROTATE180_COMPOSITE_PNG_ASSET_PATH);
  }

  private void saveAndAssertBitmapMatchesExpected(
      String testId, Bitmap actualBitmap, String actualBitmapLabel, String expectedBitmapAssetPath)
      throws IOException {
    maybeSaveTestBitmap(testId, actualBitmapLabel, actualBitmap, /* path= */ null);
    float averagePixelAbsoluteDifference =
        BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888(
            readBitmap(expectedBitmapAssetPath), actualBitmap, testId);
    assertThat(averagePixelAbsoluteDifference)
        .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE);
  }

  private static VideoFrameProcessorTestRunner.Builder getFrameProcessorTestRunnerBuilder(
      String testId,
      TextureBitmapReader textureBitmapReader,
      VideoCompositor videoCompositor,
      GlObjectsProvider glObjectsProvider) {
    int inputId = videoCompositor.registerInputSource();
    VideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
        new DefaultVideoFrameProcessor.Factory.Builder()
            .setGlObjectsProvider(glObjectsProvider)
            .setTextureOutput(
                /* textureOutputListener= */ (GlTextureInfo outputTexture,
                    long presentationTimeUs,
                    DefaultVideoFrameProcessor.ReleaseOutputTextureCallback
                        releaseOutputTextureCallback,
                    long syncObject) -> {
                  GlUtil.awaitSyncObject(syncObject);
                  textureBitmapReader.readBitmap(outputTexture, presentationTimeUs);
                  videoCompositor.queueInputTexture(
                      inputId, outputTexture, presentationTimeUs, releaseOutputTextureCallback);
                },
                /* textureOutputCapacity= */ 1)
            .build();
    return new VideoFrameProcessorTestRunner.Builder()
        .setTestId(testId)
        .setVideoFrameProcessorFactory(defaultVideoFrameProcessorFactory)
        .setInputType(INPUT_TYPE_BITMAP)
        .setInputColorInfo(ColorInfo.SRGB_BT709_FULL)
        .setBitmapReader(textureBitmapReader);
  }
}
DefaultVideoFrameProcessorMultipleTextureOutputPixelTest.java:

@@ -29,6 +29,7 @@ import androidx.media3.common.VideoFrameProcessor;
 import androidx.media3.effect.DefaultVideoFrameProcessor;
 import androidx.media3.test.utils.BitmapPixelTestUtil;
 import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
+import androidx.media3.transformer.TextureBitmapReader;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import java.util.Set;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@@ -138,9 +139,12 @@ public class DefaultVideoFrameProcessorMultipleTextureOutputPixelTest {
     VideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
         new DefaultVideoFrameProcessor.Factory.Builder()
             .setTextureOutput(
-                (outputTexture, presentationTimeUs, releaseOutputTextureCallback, token) ->
+                (outputTexture,
+                    presentationTimeUs,
+                    releaseOutputTextureCallback,
+                    unusedSyncObject) ->
                     checkNotNull(textureBitmapReader)
-                        .readBitmapFromTexture(
+                        .readBitmapAndReleaseTexture(
                             outputTexture, presentationTimeUs, releaseOutputTextureCallback),
                 /* textureOutputCapacity= */ 1)
             .build();

DefaultVideoFrameProcessorTextureOutputPixelTest.java:

@@ -48,6 +48,7 @@ import androidx.media3.test.utils.BitmapPixelTestUtil;
 import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
 import androidx.media3.transformer.AndroidTestUtil;
 import androidx.media3.transformer.EncoderUtil;
+import androidx.media3.transformer.TextureBitmapReader;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import com.google.common.collect.ImmutableList;
 import java.util.List;
@@ -533,8 +534,11 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
     DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
         new DefaultVideoFrameProcessor.Factory.Builder()
             .setTextureOutput(
-                (outputTexture, presentationTimeUs1, releaseOutputTextureCallback1, token1) ->
-                    bitmapReader.readBitmapFromTexture(
+                (outputTexture,
+                    presentationTimeUs1,
+                    releaseOutputTextureCallback1,
+                    unusedSyncObject) ->
+                    bitmapReader.readBitmapAndReleaseTexture(
                         outputTexture, presentationTimeUs1, releaseOutputTextureCallback1),
                 /* textureOutputCapacity= */ 1)
             .setGlObjectsProvider(contextSharingGlObjectsProvider)
@@ -565,8 +569,11 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest {
     DefaultVideoFrameProcessor.Factory defaultVideoFrameProcessorFactory =
         new DefaultVideoFrameProcessor.Factory.Builder()
             .setTextureOutput(
-                (outputTexture, presentationTimeUs, releaseOutputTextureCallback, token) ->
-                    textureBitmapReader.readBitmapFromTexture(
+                (outputTexture,
+                    presentationTimeUs,
+                    releaseOutputTextureCallback,
+                    unusedSyncObject) ->
+                    textureBitmapReader.readBitmapAndReleaseTexture(
                         outputTexture, presentationTimeUs, releaseOutputTextureCallback),
                 /* textureOutputCapacity= */ 1)
             .build();

FrameDropPixelTest.java:

@@ -31,6 +31,7 @@ import androidx.media3.common.VideoFrameProcessor;
 import androidx.media3.effect.DefaultVideoFrameProcessor;
 import androidx.media3.effect.FrameDropEffect;
 import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
+import androidx.media3.transformer.TextureBitmapReader;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
 import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@@ -170,7 +171,7 @@ public class FrameDropPixelTest {
             .setTextureOutput(
                 (outputTexture, presentationTimeUs, releaseOutputTextureCallback, token) ->
                     checkNotNull(textureBitmapReader)
-                        .readBitmapFromTexture(
+                        .readBitmapAndReleaseTexture(
                             outputTexture, presentationTimeUs, releaseOutputTextureCallback),
                 /* textureOutputCapacity= */ 1)
             .build();