Fix processing remaining FrameEditor input after decoder has ended.
Previously, if a) the end-of-stream buffer arrived with a frame rather than as an empty buffer, or b) processDataV29() rendered several decoder output buffers to the FrameEditor's input Surface immediately before encountering the EOS flag, those frames were stuck in the FrameEditor's input Surface and never fed to the encoder.
PiperOrigin-RevId: 424898820
parent afc10c79eb
commit a49a7d72e1
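For orientation, the fix tracks frames with two counters instead of one: a frame is counted as pending when it is registered (before the decoder renders it to the input Surface), becomes available once the SurfaceTexture's OnFrameAvailableListener fires, and end of stream is only propagated to the encoder once both counts have drained. The following is a minimal sketch of that bookkeeping in isolation; the class name FrameBookkeeping and the onFrameAvailable() method are illustrative stand-ins, not part of the actual change.

import java.util.concurrent.atomic.AtomicInteger;

/** Illustrative stand-in for the frame bookkeeping added to FrameEditor in this change. */
final class FrameBookkeeping {
  private final AtomicInteger pendingInputFrameCount = new AtomicInteger();
  private final AtomicInteger availableInputFrameCount = new AtomicInteger();
  private boolean inputStreamEnded;

  /** Call before rendering a decoder output buffer to the input surface. */
  public void registerInputFrame() {
    if (inputStreamEnded) {
      throw new IllegalStateException("Already signalled end of input stream");
    }
    pendingInputFrameCount.incrementAndGet();
  }

  /** Call from SurfaceTexture.OnFrameAvailableListener when a registered frame arrives. */
  public void onFrameAvailable() {
    pendingInputFrameCount.decrementAndGet();
    availableInputFrameCount.incrementAndGet();
  }

  /** Whether processData() has a frame to consume. */
  public boolean canProcessData() {
    return availableInputFrameCount.get() > 0;
  }

  /** Consumes one available frame (the real implementation draws it with OpenGL). */
  public void processData() {
    availableInputFrameCount.decrementAndGet();
  }

  /** Indicates that no further frames will be registered. */
  public void signalEndOfInputStream() {
    inputStreamEnded = true;
  }

  /** True only once end of stream was signalled and every registered frame was processed. */
  public boolean isEnded() {
    return inputStreamEnded
        && pendingInputFrameCount.get() == 0
        && availableInputFrameCount.get() == 0;
  }
}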
@@ -117,7 +117,7 @@ public final class FrameEditorDataProcessingTest {
    Image editedImage = checkNotNull(frameEditorOutputImageReader).acquireLatestImage();
    Bitmap editedBitmap = getArgb8888BitmapForRgba8888Image(editedImage);

    // TODO(internal b/207848601): switch to using proper tooling for testing against golden data.
    // TODO(b/207848601): switch to using proper tooling for testing against golden data.
    float averagePixelAbsoluteDifference =
        getAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, editedBitmap);
    assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
@@ -134,7 +134,7 @@ public final class FrameEditorDataProcessingTest {
    Image editedImage = checkNotNull(frameEditorOutputImageReader).acquireLatestImage();
    Bitmap editedBitmap = getArgb8888BitmapForRgba8888Image(editedImage);

    // TODO(internal b/207848601): switch to using proper tooling for testing against golden
    // TODO(b/207848601): switch to using proper tooling for testing against golden
    // data.simple
    float averagePixelAbsoluteDifference =
        getAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, editedBitmap);
@@ -152,7 +152,7 @@ public final class FrameEditorDataProcessingTest {
    Image editedImage = checkNotNull(frameEditorOutputImageReader).acquireLatestImage();
    Bitmap editedBitmap = getArgb8888BitmapForRgba8888Image(editedImage);

    // TODO(internal b/207848601): switch to using proper tooling for testing against golden data.
    // TODO(b/207848601): switch to using proper tooling for testing against golden data.
    float averagePixelAbsoluteDifference =
        getAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, editedBitmap);
    assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
@@ -160,7 +160,7 @@ public final class FrameEditorDataProcessingTest {

  @Test
  public void processData_rotate90_producesExpectedOutput() throws Exception {
    // TODO(internal b/213190310): After creating a Presentation class, move VideoSamplePipeline
    // TODO(b/213190310): After creating a Presentation class, move VideoSamplePipeline
    // resolution-based adjustments (ex. in cl/419619743) to that Presentation class, so we can
    // test that rotation doesn't distort the image.
    Matrix rotate90Matrix = new Matrix();
@@ -172,7 +172,7 @@ public final class FrameEditorDataProcessingTest {
    Image editedImage = checkNotNull(frameEditorOutputImageReader).acquireLatestImage();
    Bitmap editedBitmap = getArgb8888BitmapForRgba8888Image(editedImage);

    // TODO(internal b/207848601): switch to using proper tooling for testing against golden data.
    // TODO(b/207848601): switch to using proper tooling for testing against golden data.
    float averagePixelAbsoluteDifference =
        getAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, editedBitmap);
    assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
@@ -206,6 +206,7 @@ public final class FrameEditorDataProcessingTest {
            transformationMatrix,
            frameEditorOutputImageReader.getSurface(),
            Transformer.DebugViewProvider.NONE);
    frameEditor.registerInputFrame();

    // Queue the first video frame from the extractor.
    String mimeType = checkNotNull(mediaFormat.getString(MediaFormat.KEY_MIME));
@@ -246,7 +247,7 @@ public final class FrameEditorDataProcessingTest {

      // Sleep to give time for the surface texture to be populated.
      Thread.sleep(SURFACE_WAIT_MS);
      assertThat(frameEditor.hasInputData()).isTrue();
      assertThat(frameEditor.canProcessData()).isTrue();
    } finally {
      mediaExtractor.release();
      if (mediaCodec != null) {
@@ -20,6 +20,7 @@ import static androidx.media3.transformer.AndroidTestUtil.runTransformer;
import static com.google.common.truth.Truth.assertThat;

import android.content.Context;
import android.graphics.Matrix;
import androidx.media3.common.MimeTypes;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
@@ -32,11 +33,9 @@ import org.junit.runner.RunWith;
 */
@RunWith(AndroidJUnit4.class)
public class TransformerTest {
  // TODO(b/208986865): Also test this for API < 29. Currently the target emulator uses API 30.
  // VideoTranscodingSamplePipeline#processData works differently for API < 29, so both versions
  // should be tested.

  private static final String VP9_VIDEO_URI_STRING = "asset:///media/vp9/bear-vp9.webm";
  private static final String AVC_VIDEO_URI_STRING = "asset:///media/mp4/sample.mp4";

  @Test
  public void videoTranscoding_completesWithConsistentFrameCount() throws Exception {
@@ -64,4 +63,35 @@ public class TransformerTest {
        checkNotNull(muxerFactory.getLastFrameCountingMuxerCreated());
    assertThat(frameCountingMuxer.getFrameCount()).isEqualTo(expectedFrameCount);
  }

  @Test
  public void videoEditing_completesWithConsistentFrameCount() throws Exception {
    Context context = ApplicationProvider.getApplicationContext();
    Matrix transformationMatrix = new Matrix();
    transformationMatrix.postTranslate(/* dx= */ .2f, /* dy= */ .1f);
    FrameCountingMuxer.Factory muxerFactory =
        new FrameCountingMuxer.Factory(new FrameworkMuxer.Factory());
    Transformer transformer =
        new Transformer.Builder(context)
            .setTransformationRequest(
                new TransformationRequest.Builder()
                    .setTransformationMatrix(transformationMatrix)
                    .build())
            .setMuxerFactory(muxerFactory)
            .build();
    // Result of the following command:
    // ffprobe -count_frames -select_streams v:0 -show_entries stream=nb_read_frames sample.mp4
    int expectedFrameCount = 30;

    runTransformer(
        context,
        /* testId= */ "videoEditing_completesWithConsistentFrameCount",
        transformer,
        AVC_VIDEO_URI_STRING,
        /* timeoutSeconds= */ 120);

    FrameCountingMuxer frameCountingMuxer =
        checkNotNull(muxerFactory.getLastFrameCountingMuxerCreated());
    assertThat(frameCountingMuxer.getFrameCount()).isEqualTo(expectedFrameCount);
  }
}
@@ -16,6 +16,7 @@
package androidx.media3.transformer;

import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;

import android.content.Context;
import android.graphics.Matrix;
@@ -193,6 +194,7 @@ import java.util.concurrent.atomic.AtomicInteger;
  private final EGLSurface eglSurface;
  private final int textureId;
  private final AtomicInteger pendingInputFrameCount;
  private final AtomicInteger availableInputFrameCount;
  private final SurfaceTexture inputSurfaceTexture;
  private final Surface inputSurface;
  private final GlUtil.Program glProgram;
@@ -202,6 +204,8 @@ import java.util.concurrent.atomic.AtomicInteger;
  private final int debugPreviewWidth;
  private final int debugPreviewHeight;

  private boolean inputStreamEnded;

  private FrameEditor(
      EGLDisplay eglDisplay,
      EGLContext eglContext,
@@ -219,6 +223,7 @@ import java.util.concurrent.atomic.AtomicInteger;
    this.textureId = textureId;
    this.glProgram = glProgram;
    this.pendingInputFrameCount = new AtomicInteger();
    this.availableInputFrameCount = new AtomicInteger();
    this.outputWidth = outputWidth;
    this.outputHeight = outputHeight;
    this.debugPreviewEglSurface = debugPreviewEglSurface;
@@ -227,7 +232,10 @@ import java.util.concurrent.atomic.AtomicInteger;
    textureTransformMatrix = new float[16];
    inputSurfaceTexture = new SurfaceTexture(textureId);
    inputSurfaceTexture.setOnFrameAvailableListener(
        surfaceTexture -> pendingInputFrameCount.incrementAndGet());
        surfaceTexture -> {
          checkState(pendingInputFrameCount.getAndDecrement() > 0);
          availableInputFrameCount.incrementAndGet();
        });
    inputSurface = new Surface(inputSurfaceTexture);
  }

@@ -237,19 +245,34 @@ import java.util.concurrent.atomic.AtomicInteger;
  }

  /**
   * Returns whether there is pending input data that can be processed by calling {@link
   * #processData()}.
   * Informs the frame editor that a frame will be queued to its input surface.
   *
   * <p>Should be called before rendering a frame to the frame editor's input surface.
   *
   * @throws IllegalStateException If called after {@link #signalEndOfInputStream()}.
   */
  public boolean hasInputData() {
    return pendingInputFrameCount.get() > 0;
  public void registerInputFrame() {
    checkState(!inputStreamEnded);
    pendingInputFrameCount.incrementAndGet();
  }

  /**
   * Processes pending input frame.
   * Returns whether there is available input data that can be processed by calling {@link
   * #processData()}.
   */
  public boolean canProcessData() {
    return availableInputFrameCount.get() > 0;
  }

  /**
   * Processes an input frame.
   *
   * @throws TransformationException If an OpenGL error occurs while processing the data.
   * @throws IllegalStateException If there is no input data to process. Use {@link
   *     #canProcessData()} to check whether input data is available.
   */
  public void processData() throws TransformationException {
    checkState(canProcessData());
    try {
      inputSurfaceTexture.updateTexImage();
      inputSurfaceTexture.getTransformMatrix(textureTransformMatrix);
@@ -260,7 +283,6 @@ import java.util.concurrent.atomic.AtomicInteger;
      long surfaceTextureTimestampNs = inputSurfaceTexture.getTimestamp();
      EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, surfaceTextureTimestampNs);
      EGL14.eglSwapBuffers(eglDisplay, eglSurface);
      pendingInputFrameCount.decrementAndGet();

      if (debugPreviewEglSurface != null) {
        focusAndDrawQuad(debugPreviewEglSurface, debugPreviewWidth, debugPreviewHeight);
@@ -270,6 +292,7 @@ import java.util.concurrent.atomic.AtomicInteger;
      throw TransformationException.createForFrameEditor(
          e, TransformationException.ERROR_CODE_GL_PROCESSING_FAILED);
    }
    availableInputFrameCount.decrementAndGet();
  }

  /** Releases all resources. */
@@ -287,4 +310,16 @@ import java.util.concurrent.atomic.AtomicInteger;
    // The four-vertex triangle strip forms a quad.
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
  }

  /** Returns whether all data has been processed. */
  public boolean isEnded() {
    return inputStreamEnded
        && pendingInputFrameCount.get() == 0
        && availableInputFrameCount.get() == 0;
  }

  /** Informs the {@code FrameEditor} that no further input data should be accepted. */
  public void signalEndOfInputStream() {
    inputStreamEnded = true;
  }
}
@@ -160,7 +160,7 @@ import org.checkerframework.dataflow.qual.Pure;

  @Override
  public boolean processData() throws TransformationException {
    if (decoder.isEnded()) {
    if (hasProcessedAllInputData()) {
      return false;
    }

@@ -187,31 +187,30 @@ import org.checkerframework.dataflow.qual.Pure;
  @RequiresApi(29)
  private boolean processDataV29() throws TransformationException {
    if (frameEditor != null) {
      while (frameEditor.hasInputData()) {
        // Processes as much frames in one invocation: FrameEditor's output surface will block
        // FrameEditor when it's full. There will be no frame drop, or FrameEditor's output surface
        // growing out of bound.
      // Processes as many frames as possible. FrameEditor's output surface will block when it's
      // full, so there will be no frame drop and the surface will not grow out of bound.
      while (frameEditor.canProcessData()) {
        frameEditor.processData();
      }
    }

    while (decoder.getOutputBufferInfo() != null) {
      if (frameEditor != null) {
        frameEditor.registerInputFrame();
      }
      decoder.releaseOutputBuffer(/* render= */ true);
    }

    if (decoder.isEnded()) {
      // TODO(b/208986865): Handle possible last frame drop.
      encoder.signalEndOfInputStream();
      return false;
      signalEndOfInputStream();
    }

    return frameEditor != null && frameEditor.hasInputData();
    return frameEditor != null && frameEditor.canProcessData();
  }

  /** Processes input data. */
  private boolean processDataDefault() throws TransformationException {
    if (frameEditor != null) {
      if (frameEditor.hasInputData()) {
      if (frameEditor.canProcessData()) {
        waitingForFrameEditorInput = false;
        frameEditor.processData();
        return true;
@@ -223,11 +222,14 @@ import org.checkerframework.dataflow.qual.Pure;

    boolean decoderHasOutputBuffer = decoder.getOutputBufferInfo() != null;
    if (decoderHasOutputBuffer) {
      if (frameEditor != null) {
        frameEditor.registerInputFrame();
        waitingForFrameEditorInput = true;
      }
      decoder.releaseOutputBuffer(/* render= */ true);
      waitingForFrameEditorInput = frameEditor != null;
    }
    if (decoder.isEnded()) {
      encoder.signalEndOfInputStream();
      signalEndOfInputStream();
      return false;
    }
    return decoderHasOutputBuffer && !waitingForFrameEditorInput;
@@ -293,4 +295,17 @@ import org.checkerframework.dataflow.qual.Pure;
        .setResolution(resolutionIsHeight ? requestedFormat.height : requestedFormat.width)
        .build();
  }

  private boolean hasProcessedAllInputData() {
    return decoder.isEnded() && (frameEditor == null || frameEditor.isEnded());
  }

  private void signalEndOfInputStream() throws TransformationException {
    if (frameEditor != null) {
      frameEditor.signalEndOfInputStream();
    }
    if (frameEditor == null || frameEditor.isEnded()) {
      encoder.signalEndOfInputStream();
    }
  }
}
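Read together, the pipeline hunks above amount to the following per-invocation flow for API 29+. This is a simplified sketch using the FrameBookkeeping stand-in from earlier: the Decoder and Encoder interfaces below are illustrative stand-ins for the pipeline's decoder/encoder wrappers, not real media3 types, and the null checks for the no-frame-editor case are omitted.

/** Simplified sketch of the API 29+ processing step after this change. */
final class ProcessDataV29Sketch {
  /** Minimal stand-in for the decoder wrapper used by the pipeline. */
  interface Decoder {
    Object getOutputBufferInfo(); // Non-null while an output buffer is ready.
    void releaseOutputBuffer(boolean render);
    boolean isEnded();
  }

  /** Minimal stand-in for the encoder wrapper used by the pipeline. */
  interface Encoder {
    void signalEndOfInputStream();
  }

  static boolean processDataV29(FrameBookkeeping frameEditor, Decoder decoder, Encoder encoder) {
    // Drain every frame the frame editor already has available; its output surface blocks when
    // full, so no frames are dropped.
    while (frameEditor.canProcessData()) {
      frameEditor.processData();
    }
    // Register each frame before rendering the corresponding decoder output buffer.
    while (decoder.getOutputBufferInfo() != null) {
      frameEditor.registerInputFrame();
      decoder.releaseOutputBuffer(/* render= */ true);
    }
    // Only tell the encoder about end of stream once the frame editor has processed everything,
    // so frames rendered just before EOS are no longer lost.
    if (decoder.isEnded()) {
      frameEditor.signalEndOfInputStream();
      if (frameEditor.isEnded()) {
        encoder.signalEndOfInputStream();
      }
    }
    return frameEditor.canProcessData();
  }
}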