Speed up image-to-video export

Only sample from the input bitmap when the input image has changed.
Introduce a RepeatingGainmapShaderProgram.signalNewRepeatingFrameSequence API that
signals input bitmap changes to the GainmapShaderProgram (DefaultShaderProgram).

PiperOrigin-RevId: 637920207
dancho 2024-05-28 09:09:48 -07:00 committed by Copybara-Service
parent 3d8b5811b4
commit 02df88e5d9
9 changed files with 187 additions and 46 deletions
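In outline, the optimization is a caching pattern: the bitmap source tells the sampling shader program when the underlying image actually changes, and the shader program redraws only then, reusing its last rendered texture for the repeated frames in between. Below is a minimal, self-contained sketch of that idea; the names (CachingRenderer, signalNewFrameSequence, render) are hypothetical and do not appear in this commit.

// Minimal sketch of the skip-redraw-on-unchanged-input idea; all names here are
// hypothetical and illustrative only, not the media3 implementation.
final class CachingRenderer {
  private boolean repeatLastFrame; // Set when the same bitmap will be queued repeatedly.
  private boolean lastFrameDrawn; // True once the current bitmap has been rendered.

  /** Called by the frame producer whenever the input bitmap contents change. */
  void signalNewFrameSequence() {
    repeatLastFrame = true;
    lastFrameDrawn = false;
  }

  /** Renders one output frame, skipping the GPU work when nothing has changed. */
  void drawFrame(long presentationTimeUs, boolean uniformsChanged) {
    if (repeatLastFrame && lastFrameDrawn && !uniformsChanged) {
      return; // The previously rendered texture is still valid; reuse it.
    }
    render(presentationTimeUs); // Expensive: samples the input bitmap on the GPU.
    lastFrameDrawn = true;
  }

  private void render(long presentationTimeUs) {
    // Texture sampling and drawing would happen here.
  }
}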

View File

@@ -47,7 +47,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
   private final Queue<BitmapFrameSequenceInfo> pendingBitmaps;
   private final GlObjectsProvider glObjectsProvider;
-  private @MonotonicNonNull GainmapShaderProgram gainmapShaderProgram;
+  private @MonotonicNonNull RepeatingGainmapShaderProgram repeatingGainmapShaderProgram;
   @Nullable private GlTextureInfo currentSdrGlTextureInfo;
   private int downstreamShaderProgramCapacity;
   private boolean currentInputStreamEnded;
@@ -71,13 +71,13 @@ import org.checkerframework.checker.nullness.qual.Nullable;
   /**
    * {@inheritDoc}
    *
-   * <p>{@link GlShaderProgram} must be a {@link GainmapShaderProgram}.
+   * <p>{@link GlShaderProgram} must be a {@link RepeatingGainmapShaderProgram}.
    */
   @Override
   public void setSamplingGlShaderProgram(GlShaderProgram samplingGlShaderProgram) {
-    checkState(samplingGlShaderProgram instanceof GainmapShaderProgram);
+    checkState(samplingGlShaderProgram instanceof RepeatingGainmapShaderProgram);
     downstreamShaderProgramCapacity = 0;
-    this.gainmapShaderProgram = (GainmapShaderProgram) samplingGlShaderProgram;
+    this.repeatingGainmapShaderProgram = (RepeatingGainmapShaderProgram) samplingGlShaderProgram;
   }

   @Override
@@ -110,7 +110,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
     videoFrameProcessingTaskExecutor.submit(
         () -> {
           if (pendingBitmaps.isEmpty()) {
-            checkNotNull(gainmapShaderProgram).signalEndOfCurrentInputStream();
+            checkNotNull(repeatingGainmapShaderProgram).signalEndOfCurrentInputStream();
             DebugTraceUtil.logEvent(
                 COMPONENT_BITMAP_TEXTURE_MANAGER, EVENT_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
           } else {
@@ -155,7 +155,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
     }

     downstreamShaderProgramCapacity--;
-    checkNotNull(gainmapShaderProgram)
+    checkNotNull(repeatingGainmapShaderProgram)
         .queueInputFrame(
             glObjectsProvider, checkNotNull(currentSdrGlTextureInfo), currentPresentationTimeUs);
     DebugTraceUtil.logEvent(
@@ -172,7 +172,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
       finishedBitmapInfo.bitmap.recycle();
       if (pendingBitmaps.isEmpty() && currentInputStreamEnded) {
         // Only signal end of stream after all pending bitmaps are processed.
-        checkNotNull(gainmapShaderProgram).signalEndOfCurrentInputStream();
+        checkNotNull(repeatingGainmapShaderProgram).signalEndOfCurrentInputStream();
         DebugTraceUtil.logEvent(
             COMPONENT_BITMAP_TEXTURE_MANAGER, EVENT_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
         currentInputStreamEnded = false;
@@ -213,8 +213,9 @@ import org.checkerframework.checker.nullness.qual.Nullable;
           frameInfo.width,
           frameInfo.height);
       if (Util.SDK_INT >= 34 && bitmap.hasGainmap()) {
-        checkNotNull(gainmapShaderProgram).setGainmap(checkNotNull(bitmap.getGainmap()));
+        checkNotNull(repeatingGainmapShaderProgram).setGainmap(checkNotNull(bitmap.getGainmap()));
       }
+      checkNotNull(repeatingGainmapShaderProgram).signalNewRepeatingFrameSequence();
     } catch (GlUtil.GlException e) {
       throw VideoFrameProcessingException.from(e);
     }

View File

@@ -61,7 +61,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
  */
 @SuppressWarnings("FunctionalInterfaceClash") // b/228192298
 /* package */ final class DefaultShaderProgram extends BaseGlShaderProgram
-    implements ExternalShaderProgram, GainmapShaderProgram {
+    implements ExternalShaderProgram, RepeatingGainmapShaderProgram {

   private static final String VERTEX_SHADER_TRANSFORMATION_PATH =
       "shaders/vertex_shader_transformation_es2.glsl";
@@ -153,6 +153,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   private @MonotonicNonNull Gainmap lastGainmap;
   private int gainmapTexId;
   private @C.ColorTransfer int outputColorTransfer;
+  private boolean shouldRepeatLastFrame;
+  private boolean isRepeatingFrameDrawn;

   /**
    * Creates a new instance.
@@ -501,12 +503,18 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   @Override
   public void drawFrame(int inputTexId, long presentationTimeUs)
       throws VideoFrameProcessingException {
-    updateCompositeRgbMatrixArray(presentationTimeUs);
-    updateCompositeTransformationMatrixAndVisiblePolygon(presentationTimeUs);
+    boolean compositeRgbMatrixArrayChanged = updateCompositeRgbMatrixArray(presentationTimeUs);
+    boolean compositeTransformationMatrixAndVisiblePolygonChanged =
+        updateCompositeTransformationMatrixAndVisiblePolygon(presentationTimeUs);
+    boolean uniformsChanged =
+        compositeRgbMatrixArrayChanged || compositeTransformationMatrixAndVisiblePolygonChanged;
     if (visiblePolygon.size() < 3) {
       return; // Need at least three visible vertices for a triangle.
     }
+    if (shouldRepeatLastFrame && !uniformsChanged && isRepeatingFrameDrawn) {
+      return;
+    }

     try {
       glProgram.use();
       setGainmapSamplerAndUniforms();
@@ -524,6 +532,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     } catch (GlUtil.GlException e) {
       throw new VideoFrameProcessingException(e, presentationTimeUs);
     }
+    isRepeatingFrameDrawn = true;
   }

   @Override
@@ -553,6 +562,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     if (lastGainmap != null && GainmapUtil.equals(this.lastGainmap, gainmap)) {
       return;
     }
+    isRepeatingFrameDrawn = false;
     this.lastGainmap = gainmap;
     if (gainmapTexId == C.INDEX_UNSET) {
       gainmapTexId = GlUtil.createTexture(gainmap.getGainmapContents());
@@ -561,6 +571,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     }
   }

+  @Override
+  public void signalNewRepeatingFrameSequence() {
+    // Skipping drawFrame() is only allowed if there's only one possible output texture.
+    checkState(outputTexturePool.capacity() == 1);
+    shouldRepeatLastFrame = true;
+    isRepeatingFrameDrawn = false;
+  }
+
+  @Override
+  public boolean shouldClearTextureBuffer() {
+    return !(isRepeatingFrameDrawn && shouldRepeatLastFrame);
+  }
+
   /**
    * Sets the output {@link C.ColorTransfer}.
    *
@@ -581,8 +604,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   /**
    * Updates {@link #compositeTransformationMatrixArray} and {@link #visiblePolygon} based on the
    * given frame timestamp.
+   *
+   * <p>Returns whether the transformation matrix or visible polygon has changed.
    */
-  private void updateCompositeTransformationMatrixAndVisiblePolygon(long presentationTimeUs) {
+  private boolean updateCompositeTransformationMatrixAndVisiblePolygon(long presentationTimeUs) {
     float[][] matricesAtPresentationTime = new float[matrixTransformations.size()][16];
     for (int i = 0; i < matrixTransformations.size(); i++) {
       matricesAtPresentationTime[i] =
@@ -590,7 +615,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     }

     if (!updateMatrixCache(transformationMatrixCache, matricesAtPresentationTime)) {
-      return;
+      return false;
     }

     // Compute the compositeTransformationMatrix and transform and clip the visiblePolygon for each
@@ -616,7 +641,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
               MatrixUtils.transformPoints(transformationMatrix, visiblePolygon));
       if (visiblePolygon.size() < 3) {
         // Can ignore remaining matrices as there are not enough vertices left to form a polygon.
-        return;
+        return true;
       }
     }
     // Calculate the input frame vertices corresponding to the output frame's visible polygon.
@@ -626,17 +651,22 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
         compositeTransformationMatrixArray,
         /* mOffset= */ 0);
     visiblePolygon = MatrixUtils.transformPoints(tempResultMatrix, visiblePolygon);
+    return true;
   }

-  /** Updates {@link #compositeRgbMatrixArray} based on the given frame timestamp. */
-  private void updateCompositeRgbMatrixArray(long presentationTimeUs) {
+  /**
+   * Updates {@link #compositeRgbMatrixArray} based on the given frame timestamp.
+   *
+   * <p>Returns whether the {@link #compositeRgbMatrixArray} has changed.
+   */
+  private boolean updateCompositeRgbMatrixArray(long presentationTimeUs) {
     float[][] matricesCurrTimestamp = new float[rgbMatrices.size()][16];
     for (int i = 0; i < rgbMatrices.size(); i++) {
       matricesCurrTimestamp[i] = rgbMatrices.get(i).getMatrix(presentationTimeUs, useHdr);
     }

     if (!updateMatrixCache(rgbMatrixCache, matricesCurrTimestamp)) {
-      return;
+      return false;
     }

     GlUtil.setToIdentity(compositeRgbMatrixArray);
@@ -656,6 +686,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
           /* destPost= */ 0,
           /* length= */ tempResultMatrix.length);
     }
+    return true;
   }

   /**
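The early return added to drawFrame above is only safe because updateCompositeRgbMatrixArray and updateCompositeTransformationMatrixAndVisiblePolygon now report whether their cached matrices changed. A minimal sketch of that change-detection pattern follows, using a hypothetical MatrixCache class rather than the DefaultShaderProgram internals.

import java.util.Arrays;

// Hypothetical helper illustrating "update returns whether anything changed";
// not the media3 matrix-cache implementation.
final class MatrixCache {
  private float[][] cached = new float[0][];

  /** Stores the latest matrices and returns true only if they differ from the cached ones. */
  boolean update(float[][] latest) {
    if (cached.length == latest.length) {
      boolean unchanged = true;
      for (int i = 0; i < latest.length; i++) {
        if (!Arrays.equals(cached[i], latest[i])) {
          unchanged = false;
          break;
        }
      }
      if (unchanged) {
        return false; // Caller can skip recomputing composite uniforms and redrawing.
      }
    }
    cached = new float[latest.length][];
    for (int i = 0; i < latest.length; i++) {
      cached[i] = latest[i].clone();
    }
    return true;
  }
}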

View File

@@ -0,0 +1,29 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.effect;
+
+/** Interface for a {@link GlShaderProgram} that can repeat an input frame. */
+/* package */ interface RepeatingFrameShaderProgram extends GlShaderProgram {
+
+  /**
+   * Signals that the frame contents will change in the next call to {@link
+   * GlShaderProgram#queueInputFrame}.
+   *
+   * <p>This class can assume that the input frame contents are unchanged until the next call to
+   * this method.
+   */
+  void signalNewRepeatingFrameSequence();
+}
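A sketch of the calling convention this interface implies, shown as a fragment rather than compilable code: signal once per new bitmap, then queue the same texture for every repeated output timestamp. GlObjectsProvider, GlTextureInfo and queueInputFrame are the existing media3 types used elsewhere in this commit; the helper method itself is hypothetical.

// Hypothetical producer fragment; queueInputFrame is the existing GlShaderProgram method.
void queueRepeatedBitmap(
    RepeatingGainmapShaderProgram shaderProgram,
    GlObjectsProvider glObjectsProvider,
    GlTextureInfo texture,
    long[] presentationTimesUs) {
  // The texture was just uploaded from a new bitmap, so a new repeating sequence starts here.
  shaderProgram.signalNewRepeatingFrameSequence();
  for (long presentationTimeUs : presentationTimesUs) {
    // Subsequent frames reuse the same texture; the shader program may skip re-sampling it
    // until the next signalNewRepeatingFrameSequence() call.
    shaderProgram.queueInputFrame(glObjectsProvider, texture, presentationTimeUs);
  }
}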

View File

@@ -0,0 +1,19 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.effect;
+
+/* package */ interface RepeatingGainmapShaderProgram
+    extends RepeatingFrameShaderProgram, GainmapShaderProgram {}

View File

@@ -138,17 +138,17 @@ public final class SequenceEffectTestUtil {
   }

   /**
-   * Asserts that the first frame extracted from the video in filePath matches output in {@link
-   * #PNG_ASSET_BASE_PATH}/{@code testId}_0.png.
+   * Asserts that the first {@code frameCount} frames extracted from the video in {@code filePath}
+   * match the expected output in {@link #PNG_ASSET_BASE_PATH}/{@code testId}_num.png.
    *
    * <p>Also saves the first frame as a bitmap, in case they differ from expected.
    */
-  public static void assertFirstFrameMatchesExpectedPsnrAndSave(
-      Context context, String testId, String filePath, float psnrThreshold)
+  public static void assertFramesMatchExpectedPsnrAndSave(
+      Context context, String testId, String filePath, float psnrThreshold, int frameCount)
       throws IOException, InterruptedException {
-    Bitmap firstEncodedFrame = extractBitmapsFromVideo(context, filePath).get(0);
-    assertBitmapsMatchExpectedPsnrAndSave(
-        ImmutableList.of(firstEncodedFrame), testId, psnrThreshold);
+    ImmutableList<Bitmap> frames =
+        extractBitmapsFromVideo(context, filePath).subList(0, frameCount);
+    assertBitmapsMatchExpectedPsnrAndSave(frames, testId, psnrThreshold);
   }

   private static void assertBitmapsMatchExpectedPsnrAndSave(

View File

@@ -43,7 +43,7 @@ import static androidx.media3.transformer.SequenceEffectTestUtil.PSNR_THRESHOLD;
 import static androidx.media3.transformer.SequenceEffectTestUtil.PSNR_THRESHOLD_HD;
 import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
 import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
-import static androidx.media3.transformer.SequenceEffectTestUtil.assertFirstFrameMatchesExpectedPsnrAndSave;
+import static androidx.media3.transformer.SequenceEffectTestUtil.assertFramesMatchExpectedPsnrAndSave;
 import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
 import static androidx.media3.transformer.SequenceEffectTestUtil.createComposition;
 import static androidx.media3.transformer.SequenceEffectTestUtil.decoderProducesWashedOutColours;
@@ -175,8 +175,8 @@ public final class TransformerSequenceEffectTest {
       atLeastOneDecoderSucceeds = true;

       assertThat(new File(result.filePath).length()).isGreaterThan(0);
-      assertFirstFrameMatchesExpectedPsnrAndSave(
-          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
+      assertFramesMatchExpectedPsnrAndSave(
+          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD, /* frameCount= */ 1);
     }
     assertThat(atLeastOneDecoderSucceeds).isTrue();
   }
@@ -215,8 +215,8 @@ public final class TransformerSequenceEffectTest {
       atLeastOneDecoderSucceeds = true;

       assertThat(new File(result.filePath).length()).isGreaterThan(0);
-      assertFirstFrameMatchesExpectedPsnrAndSave(
-          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
+      assertFramesMatchExpectedPsnrAndSave(
+          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD, /* frameCount= */ 1);
     }
     assertThat(atLeastOneDecoderSucceeds).isTrue();
   }
@@ -253,8 +253,8 @@ public final class TransformerSequenceEffectTest {
       atLeastOneDecoderSucceeds = true;

       assertThat(new File(result.filePath).length()).isGreaterThan(0);
-      assertFirstFrameMatchesExpectedPsnrAndSave(
-          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD);
+      assertFramesMatchExpectedPsnrAndSave(
+          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD, /* frameCount= */ 1);
     }
     assertThat(atLeastOneDecoderSucceeds).isTrue();
   }
@@ -294,8 +294,8 @@ public final class TransformerSequenceEffectTest {
       atLeastOneDecoderSucceeds = true;

       assertThat(new File(result.filePath).length()).isGreaterThan(0);
-      assertFirstFrameMatchesExpectedPsnrAndSave(
-          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
+      assertFramesMatchExpectedPsnrAndSave(
+          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD, /* frameCount= */ 1);
     }
     assertThat(atLeastOneDecoderSucceeds).isTrue();
   }
@@ -336,8 +336,8 @@ public final class TransformerSequenceEffectTest {
       atLeastOneDecoderSucceeds = true;

       assertThat(new File(result.filePath).length()).isGreaterThan(0);
-      assertFirstFrameMatchesExpectedPsnrAndSave(
-          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD);
+      assertFramesMatchExpectedPsnrAndSave(
+          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD, /* frameCount= */ 1);
     }
     assertThat(atLeastOneDecoderSucceeds).isTrue();
@@ -390,12 +390,15 @@ public final class TransformerSequenceEffectTest {
     assertThat(new File(result.filePath).length()).isGreaterThan(0);

     // The PSNR threshold was chosen based on:
-    // Pixel 8 with coordinate rounding error during texture sampling, hits PSNR 23.4. With fix ->
-    // 29.5
-    // Realmi C11 with bug fix hits PSNR 29.94
-    // rmx3563 -> 28.8
-    assertFirstFrameMatchesExpectedPsnrAndSave(
-        context, testId, checkNotNull(result.filePath), 28.5f);
+    // Pixel 8 with coordinate rounding error during texture sampling, gets PSNR 23.4.
+    // After fix -> 29.5
+    // rmx3563 with bug fix achieves PSNR 28.8
+    assertFramesMatchExpectedPsnrAndSave(
+        context,
+        testId,
+        checkNotNull(result.filePath),
+        /* psnrThreshold= */ 28.5f,
+        /* frameCount= */ 2);
   }

   @Test

View File

@@ -15,21 +15,30 @@
  */
 package androidx.media3.transformer.mh;

+import static androidx.media3.common.MimeTypes.VIDEO_H264;
 import static androidx.media3.transformer.AndroidTestUtil.MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING;
+import static androidx.media3.transformer.AndroidTestUtil.ULTRA_HDR_URI_STRING;
 import static androidx.media3.transformer.AndroidTestUtil.assumeFormatsSupported;
 import static com.google.common.truth.Truth.assertThat;

 import android.content.Context;
 import android.net.Uri;
+import android.os.Build;
+import androidx.media3.common.Format;
 import androidx.media3.common.MediaItem;
 import androidx.media3.common.MimeTypes;
+import androidx.media3.common.util.Util;
+import androidx.media3.effect.Presentation;
 import androidx.media3.transformer.AndroidTestUtil;
 import androidx.media3.transformer.EditedMediaItem;
+import androidx.media3.transformer.Effects;
 import androidx.media3.transformer.ExportTestResult;
 import androidx.media3.transformer.Transformer;
 import androidx.media3.transformer.TransformerAndroidTestRunner;
 import androidx.test.core.app.ApplicationProvider;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
+import com.google.common.base.Ascii;
+import com.google.common.collect.ImmutableList;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -77,4 +86,53 @@ public class TranscodeSpeedTest {
     assertThat(result.throughputFps).isAtLeast(20);
   }

+  @Test
+  public void exportImage_to720p_completesWithHighThroughput() throws Exception {
+    Context context = ApplicationProvider.getApplicationContext();
+    Format outputFormat =
+        new Format.Builder()
+            .setSampleMimeType(VIDEO_H264)
+            .setFrameRate(30.00f)
+            .setCodecs("avc1.42C028")
+            .setWidth(1280)
+            .setHeight(720)
+            .build();
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ AndroidTestUtil.MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS_FORMAT,
+        outputFormat);
+    Transformer transformer =
+        new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H264).build();
+    boolean isHighPerformance = Util.SDK_INT >= 31 && Build.SOC_MODEL.startsWith("Tensor");
+    if (Util.SDK_INT == 33 && Ascii.toLowerCase(Util.MODEL).contains("pixel 6")) {
+      // Pixel 6 is usually quick, unless it's on API 33.
+      isHighPerformance = false;
+    }
+
+    // This test uses ULTRA_HDR_URI_STRING because it's high resolution.
+    // Ultra HDR gainmap is ignored.
+    EditedMediaItem editedMediaItem =
+        new EditedMediaItem.Builder(MediaItem.fromUri(ULTRA_HDR_URI_STRING))
+            .setFrameRate(30)
+            .setDurationUs(isHighPerformance ? 45_000_000 : 15_000_000)
+            .setEffects(
+                new Effects(
+                    /* audioProcessors= */ ImmutableList.of(),
+                    /* videoEffects= */ ImmutableList.of(
+                        Presentation.createForWidthAndHeight(
+                            720, 1280, Presentation.LAYOUT_SCALE_TO_FIT))))
+            .build();
+
+    ExportTestResult result =
+        new TransformerAndroidTestRunner.Builder(context, transformer)
+            .build()
+            .run(testId, editedMediaItem);
+
+    // This test depends on device GPU performance. Sampling high-resolution textures
+    // is expensive. If an extra shader program runs on each frame, devices with slow GPU
+    // such as moto e5 play will drop to 5 fps.
+    // Devices with a fast GPU and encoder will drop under 300 fps.
+    assertThat(result.throughputFps).isAtLeast(isHighPerformance ? 400 : 20);
+  }
 }

View File

@@ -31,7 +31,7 @@ import static androidx.media3.transformer.SequenceEffectTestUtil.NO_EFFECT;
 import static androidx.media3.transformer.SequenceEffectTestUtil.PSNR_THRESHOLD_HD;
 import static androidx.media3.transformer.SequenceEffectTestUtil.SINGLE_30_FPS_VIDEO_FRAME_THRESHOLD_MS;
 import static androidx.media3.transformer.SequenceEffectTestUtil.assertBitmapsMatchExpectedAndSave;
-import static androidx.media3.transformer.SequenceEffectTestUtil.assertFirstFrameMatchesExpectedPsnrAndSave;
+import static androidx.media3.transformer.SequenceEffectTestUtil.assertFramesMatchExpectedPsnrAndSave;
 import static androidx.media3.transformer.SequenceEffectTestUtil.clippedVideo;
 import static androidx.media3.transformer.SequenceEffectTestUtil.createComposition;
 import static androidx.media3.transformer.SequenceEffectTestUtil.tryToExportCompositionWithDecoder;
@@ -226,8 +226,8 @@ public final class TransformerSequenceEffectTestWithHdr {
       atLeastOneDecoderSucceeds = true;

       assertThat(checkNotNull(result).filePath).isNotNull();
-      assertFirstFrameMatchesExpectedPsnrAndSave(
-          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
+      assertFramesMatchExpectedPsnrAndSave(
+          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD, /* frameCount= */ 1);
     }
     assertThat(atLeastOneDecoderSucceeds).isTrue();
   }
@@ -261,8 +261,8 @@ public final class TransformerSequenceEffectTestWithHdr {
       atLeastOneDecoderSucceeds = true;

       assertThat(checkNotNull(result).filePath).isNotNull();
-      assertFirstFrameMatchesExpectedPsnrAndSave(
-          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD);
+      assertFramesMatchExpectedPsnrAndSave(
+          context, testId, checkNotNull(result.filePath), PSNR_THRESHOLD_HD, /* frameCount= */ 1);
     }
     assertThat(atLeastOneDecoderSucceeds).isTrue();
   }