Transformer: Support Texture asset loading

PiperOrigin-RevId: 530888319
tofunmi 2023-05-10 13:10:49 +00:00 committed by Tofunmi Adigun-Hameed
parent 97b65bcf67
commit 7fea435b7e
7 changed files with 568 additions and 8 deletions

View File

@@ -22,18 +22,26 @@ import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import android.content.Context;
import android.graphics.Bitmap;
import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.os.Build;
import android.util.Pair;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.MediaFormatUtil;
import androidx.media3.common.util.Util;
import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.exoplayer.mediacodec.MediaCodecUtil;
import com.google.common.collect.ImmutableList;
import java.io.File;
@@ -507,6 +515,37 @@ public final class AndroidTestUtil {
public static final String MP3_ASSET_URI_STRING = "asset:///media/mp3/test.mp3";
/**
* Creates the GL objects needed to set up a GL environment, including an {@link EGLDisplay} and an
* {@link EGLContext}.
*/
public static EGLContext createOpenGlObjects() throws GlUtil.GlException {
EGLDisplay eglDisplay = GlUtil.createEglDisplay();
int[] configAttributes = GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888;
GlObjectsProvider glObjectsProvider =
new DefaultGlObjectsProvider(/* sharedEglContext= */ null);
EGLContext eglContext =
glObjectsProvider.createEglContext(eglDisplay, /* openGlVersion= */ 2, configAttributes);
glObjectsProvider.createFocusedPlaceholderEglSurface(eglContext, eglDisplay, configAttributes);
return eglContext;
}
/**
* Generates a {@linkplain android.opengl.GLES10#GL_TEXTURE_2D traditional GLES texture} from the
* given bitmap.
*
* <p>A GL context must be set up before calling this method.
*/
public static int generateTextureFromBitmap(Bitmap bitmap) throws GlUtil.GlException {
int texId =
GlUtil.createTexture(
bitmap.getWidth(), bitmap.getHeight(), /* useHighPrecisionColorComponents= */ false);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texId);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
GlUtil.checkGlError();
return texId;
}
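These two helpers are designed to be used together on the thread that will queue textures, as the texture-input tests added below do: createOpenGlObjects() also makes a placeholder EGL surface current, so generateTextureFromBitmap(Bitmap) can upload straight away. A minimal sketch under those assumptions (hypothetical helper name; imports as in the end-to-end test below):

// Hypothetical helper combining the two utilities above (not part of this commit).
public static int setUpTextureForTest(Context context) throws Exception {
  createOpenGlObjects(); // creates the EGL context and focuses a placeholder surface
  Bitmap bitmap =
      new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
  return generateTextureFromBitmap(bitmap); // uploads the bitmap into a GL_TEXTURE_2D
}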
/**
* Log in logcat and in an analysis file that this test was skipped.
*

View File

@@ -15,23 +15,38 @@
*/
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.AndroidTestUtil.MP3_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.createOpenGlObjects;
import static androidx.media3.transformer.AndroidTestUtil.generateTextureFromBitmap;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import android.content.Context;
import android.graphics.Bitmap;
import android.net.Uri;
import android.opengl.EGLContext;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import androidx.media3.common.C;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor.OnInputFrameProcessedListener;
import androidx.media3.common.audio.AudioProcessor;
import androidx.media3.common.audio.SonicAudioProcessor;
import androidx.media3.common.util.GlUtil;
import androidx.media3.datasource.DataSourceBitmapLoader;
import androidx.media3.effect.Contrast;
import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.FrameCache;
import androidx.media3.effect.Presentation;
import androidx.media3.effect.RgbFilter;
@@ -39,6 +54,7 @@ import androidx.media3.effect.TimestampWrapper;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -50,6 +66,7 @@ import org.junit.runner.RunWith;
public class TransformerEndToEndTest {
private final Context context = ApplicationProvider.getApplicationContext();
private volatile @MonotonicNonNull TextureAssetLoader textureAssetLoader;
@Test
public void videoEditing_withImageInput_completesWithCorrectFrameCountAndDuration()
@@ -98,6 +115,118 @@ public class TransformerEndToEndTest {
.isEqualTo((C.MILLIS_PER_SECOND / expectedFrameCount) * (expectedFrameCount - 1));
}
@Test
public void videoEditing_withTextureInput_completesWithCorrectFrameCountAndDuration()
throws Exception {
String testId = "videoEditing_withTextureInput_completesWithCorrectFrameCountAndDuration";
Bitmap bitmap =
new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
Transformer transformer =
new Transformer.Builder(context)
.setAssetLoaderFactory(
new TestTextureAssetLoaderFactory(bitmap.getWidth(), bitmap.getHeight()))
.build();
int expectedFrameCount = 2;
EGLContext currentContext = createOpenGlObjects();
DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(new DefaultGlObjectsProvider(currentContext))
.build();
ImmutableList<Effect> videoEffects = ImmutableList.of(Presentation.createForHeight(480));
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
.setDurationUs(C.MICROS_PER_SECOND)
.setEffects(
new Effects(
/* audioProcessors= */ ImmutableList.of(),
videoEffects,
videoFrameProcessorFactory))
.build();
int texId = generateTextureFromBitmap(bitmap);
HandlerThread textureQueuingThread = new HandlerThread("textureQueuingThread");
textureQueuingThread.start();
Looper looper = checkNotNull(textureQueuingThread.getLooper());
Handler textureHandler =
new Handler(looper) {
@Override
public void handleMessage(Message msg) {
if (textureAssetLoader != null
&& textureAssetLoader.queueInputTexture(texId, /* presentationTimeUs= */ 0)) {
textureAssetLoader.queueInputTexture(
texId, /* presentationTimeUs= */ C.MICROS_PER_SECOND / 2);
textureAssetLoader.signalEndOfVideoInput();
return;
}
sendEmptyMessage(0);
}
};
textureHandler.sendEmptyMessage(0);
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, editedMediaItem);
assertThat(result.exportResult.videoFrameCount).isEqualTo(expectedFrameCount);
// Expected timestamp of the last frame.
assertThat(result.exportResult.durationMs).isEqualTo(C.MILLIS_PER_SECOND / 2);
}
@Test
public void videoTranscoding_withTextureInput_completesWithCorrectFrameCountAndDuration()
throws Exception {
String testId = "videoTranscoding_withTextureInput_completesWithCorrectFrameCountAndDuration";
Bitmap bitmap =
new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
Transformer transformer =
new Transformer.Builder(context)
.setAssetLoaderFactory(
new TestTextureAssetLoaderFactory(bitmap.getWidth(), bitmap.getHeight()))
.build();
int expectedFrameCount = 2;
EGLContext currentContext = createOpenGlObjects();
DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(new DefaultGlObjectsProvider(currentContext))
.build();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
.setDurationUs(C.MICROS_PER_SECOND)
.setEffects(
new Effects(
/* audioProcessors= */ ImmutableList.of(),
/* videoEffects= */ ImmutableList.of(),
videoFrameProcessorFactory))
.build();
int texId = generateTextureFromBitmap(bitmap);
HandlerThread textureQueuingThread = new HandlerThread("textureQueuingThread");
textureQueuingThread.start();
Looper looper = checkNotNull(textureQueuingThread.getLooper());
Handler textureHandler =
new Handler(looper) {
@Override
public void handleMessage(Message msg) {
if (textureAssetLoader != null
&& textureAssetLoader.queueInputTexture(texId, /* presentationTimeUs= */ 0)) {
textureAssetLoader.queueInputTexture(
texId, /* presentationTimeUs= */ C.MICROS_PER_SECOND / 2);
textureAssetLoader.signalEndOfVideoInput();
return;
}
sendEmptyMessage(0);
}
};
textureHandler.sendEmptyMessage(0);
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, editedMediaItem);
assertThat(result.exportResult.videoFrameCount).isEqualTo(expectedFrameCount);
// Expected timestamp of the last frame.
assertThat(result.exportResult.durationMs).isEqualTo(C.MILLIS_PER_SECOND / 2);
}
@Test
public void videoEditing_completesWithConsistentFrameCount() throws Exception {
Transformer transformer =
@@ -366,6 +495,34 @@ public class TransformerEndToEndTest {
assertThat(result.exportResult.durationMs).isEqualTo(3100);
}
private final class TestTextureAssetLoaderFactory implements AssetLoader.Factory {
private final int width;
private final int height;
TestTextureAssetLoaderFactory(int width, int height) {
this.width = width;
this.height = height;
}
@Override
public TextureAssetLoader createAssetLoader(
EditedMediaItem editedMediaItem, Looper looper, AssetLoader.Listener listener) {
Format format = new Format.Builder().setWidth(width).setHeight(height).build();
OnInputFrameProcessedListener frameProcessedListener =
texId -> {
try {
GlUtil.deleteTexture(texId);
} catch (GlUtil.GlException e) {
throw new VideoFrameProcessingException(e);
}
};
textureAssetLoader =
new TextureAssetLoader(editedMediaItem, listener, format, frameProcessedListener);
return textureAssetLoader;
}
}
private static final class VideoUnsupportedEncoderFactory implements Codec.EncoderFactory {
private final Codec.EncoderFactory encoderFactory;

View File

@@ -19,6 +19,7 @@ import android.graphics.Bitmap;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.VideoFrameProcessor.OnInputFrameProcessedListener;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.decoder.DecoderInputBuffer;
@@ -82,6 +83,31 @@ public interface SampleConsumer {
// Methods to pass raw video input.
/**
* Provides an {@link OnInputFrameProcessedListener} to the consumer.
*
* <p>Should only be used for raw video data when input is provided by texture ID.
*
* @param listener The {@link OnInputFrameProcessedListener}.
*/
default void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
throw new UnsupportedOperationException();
}
/**
* Attempts to provide an input texture to the consumer.
*
* <p>Should only be used for raw video data.
*
* @param texId The ID of the texture to queue to the consumer.
* @param presentationTimeUs The presentation time for the texture, in microseconds.
* @return Whether the texture was successfully queued. If {@code false}, the caller should try
* again later.
*/
default boolean queueInputTexture(int texId, long presentationTimeUs) {
throw new UnsupportedOperationException();
}
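The boolean return is the interface's backpressure signal for texture input: false means the consumer has no capacity yet, and the caller should retry the same texture and timestamp later. A minimal sketch of a caller honoring that contract (hypothetical helper; assumes the GL context that owns texId is current on the calling thread):

static void queueTextureBlocking(SampleConsumer consumer, int texId, long presentationTimeUs)
    throws InterruptedException {
  while (!consumer.queueInputTexture(texId, presentationTimeUs)) {
    Thread.sleep(/* millis= */ 10); // not ready yet; back off and retry
  }
}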
/**
* Returns the input {@link Surface}, from which the consumer reads input frames.
*

View File

@@ -32,6 +32,7 @@ import androidx.media3.common.ColorInfo;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.VideoFrameProcessor.OnInputFrameProcessedListener;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.decoder.DecoderInputBuffer;
@@ -422,6 +423,24 @@ import java.util.concurrent.atomic.AtomicInteger;
return sampleConsumer.queueInputBitmap(inputBitmap, durationUs, frameRate);
}
@Override
public boolean queueInputTexture(int texId, long presentationTimeUs) {
long globalTimestampUs = totalDurationUs + presentationTimeUs;
if (isLooping && globalTimestampUs >= maxSequenceDurationUs) {
if (isMaxSequenceDurationUsFinal && !videoLoopingEnded) {
videoLoopingEnded = true;
signalEndOfVideoInput();
}
return false;
}
return sampleConsumer.queueInputTexture(texId, presentationTimeUs);
}
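This override maps the texture's local timestamp into the sequence timeline before applying the looping bound. A worked example with assumed values (not taken from the commit):

long totalDurationUs = 2_000_000;       // media consumed in earlier loop iterations
long maxSequenceDurationUs = 3_000_000; // length of the sequence being looped against
long presentationTimeUs = 1_500_000;    // local timestamp of the queued texture
long globalTimestampUs = totalDurationUs + presentationTimeUs; // 3_500_000
// While looping, 3_500_000 >= 3_000_000, so the texture is rejected: end of video
// input is signalled once (guarded by videoLoopingEnded) and false is returned.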
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
sampleConsumer.setOnInputFrameProcessedListener(listener);
}
@Override
public Surface getInputSurface() {
return sampleConsumer.getInputSurface();

View File

@@ -0,0 +1,151 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.ExportException.ERROR_CODE_UNSPECIFIED;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static java.lang.Math.round;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.VideoFrameProcessor.OnInputFrameProcessedListener;
import androidx.media3.common.util.UnstableApi;
import com.google.common.collect.ImmutableMap;
/**
* An {@link AssetLoader} implementation that loads videos from {@linkplain
* android.opengl.GLES10#GL_TEXTURE_2D traditional GLES texture} instances.
*
* <p>Typically instantiated in a custom {@link AssetLoader.Factory} that saves a reference to the
* created {@link TextureAssetLoader}. Input is provided by calling {@link #queueInputTexture} for
* each video frame, then {@link #signalEndOfVideoInput() signalling the end of input} once all
* frames have been queued.
*/
@UnstableApi
public final class TextureAssetLoader implements AssetLoader {
private final EditedMediaItem editedMediaItem;
private final Listener assetLoaderListener;
private final Format format;
private final OnInputFrameProcessedListener frameProcessedListener;
@Nullable private SampleConsumer sampleConsumer;
private @Transformer.ProgressState int progressState;
private long lastQueuedPresentationTimeUs;
private boolean isTrackAdded;
/**
* Creates an instance.
*
* <p>The {@link EditedMediaItem#durationUs}, {@link Format#width} and {@link Format#height} must
* be set.
*/
public TextureAssetLoader(
EditedMediaItem editedMediaItem,
Listener assetLoaderListener,
Format format,
OnInputFrameProcessedListener frameProcessedListener) {
checkArgument(editedMediaItem.durationUs != C.TIME_UNSET);
checkArgument(format.height != Format.NO_VALUE && format.width != Format.NO_VALUE);
this.editedMediaItem = editedMediaItem;
this.assetLoaderListener = assetLoaderListener;
this.format = format.buildUpon().setSampleMimeType(MimeTypes.VIDEO_RAW).build();
this.frameProcessedListener = frameProcessedListener;
progressState = PROGRESS_STATE_NOT_STARTED;
}
@Override
public void start() {
progressState = PROGRESS_STATE_AVAILABLE;
assetLoaderListener.onDurationUs(editedMediaItem.durationUs);
assetLoaderListener.onTrackCount(1);
}
@Override
public @Transformer.ProgressState int getProgress(ProgressHolder progressHolder) {
if (progressState == PROGRESS_STATE_AVAILABLE) {
progressHolder.progress =
round((lastQueuedPresentationTimeUs / (float) editedMediaItem.durationUs) * 100);
}
return progressState;
}
@Override
public ImmutableMap<Integer, String> getDecoderNames() {
return ImmutableMap.of();
}
@Override
public void release() {
isTrackAdded = false;
progressState = PROGRESS_STATE_NOT_STARTED;
sampleConsumer = null;
}
/**
* Attempts to provide an input texture.
*
* <p>Must be called on the same thread as {@link #signalEndOfVideoInput}.
*
* @param texId The ID of the texture to queue.
* @param presentationTimeUs The presentation time for the texture, in microseconds.
* @return Whether the texture was successfully queued. If {@code false}, the caller should try
* again later.
*/
public boolean queueInputTexture(int texId, long presentationTimeUs) {
try {
if (!isTrackAdded) {
assetLoaderListener.onTrackAdded(format, SUPPORTED_OUTPUT_TYPE_DECODED);
isTrackAdded = true;
}
if (sampleConsumer == null) {
sampleConsumer = assetLoaderListener.onOutputFormat(format);
if (sampleConsumer == null) {
return false;
} else {
sampleConsumer.setOnInputFrameProcessedListener(frameProcessedListener);
}
}
if (!sampleConsumer.queueInputTexture(texId, presentationTimeUs)) {
return false;
}
lastQueuedPresentationTimeUs = presentationTimeUs;
return true;
} catch (ExportException e) {
assetLoaderListener.onError(e);
} catch (RuntimeException e) {
assetLoaderListener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED));
}
return false;
}
/**
* Signals that no further input frames will be rendered.
*
* <p>Must be called on the same thread as {@link #queueInputTexture}.
*/
public void signalEndOfVideoInput() {
try {
checkNotNull(sampleConsumer).signalEndOfVideoInput();
} catch (RuntimeException e) {
assetLoaderListener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED));
}
}
}
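The resulting call sequence, as exercised by runTextureAssetLoader in the unit test below (a sketch: the busy-wait stands in for the Handler-based polling used in the instrumentation tests, and texId is assumed to name a texture in the current GL context):

assetLoader.start(); // reports the duration and track count to the listener
while (!assetLoader.queueInputTexture(texId, /* presentationTimeUs= */ 0)) {
  // false: the downstream SampleConsumer was not ready; retry with the same frame.
}
assetLoader.signalEndOfVideoInput(); // no further frames will be queued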

View File

@@ -21,6 +21,7 @@ import static androidx.media3.common.ColorInfo.SRGB_BT709_FULL;
import static androidx.media3.common.ColorInfo.isTransferHdr;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.EncoderUtil.getSupportedEncodersForHdrEditing;
@@ -46,6 +47,7 @@ import androidx.media3.common.MimeTypes;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoFrameProcessor.OnInputFrameProcessedListener;
import androidx.media3.common.util.Consumer;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.Size;
@@ -213,14 +215,8 @@ import org.checkerframework.dataflow.qual.Pure;
boolean isLast) {
if (trackFormat != null) {
Size decodedSize = getDecodedSize(trackFormat);
-      String mimeType = checkNotNull(trackFormat.sampleMimeType);
-      if (MimeTypes.isVideo(mimeType)) {
-        videoFrameProcessor.registerInputStream(INPUT_TYPE_SURFACE);
-      } else if (MimeTypes.isImage(mimeType)) {
-        videoFrameProcessor.registerInputStream(INPUT_TYPE_BITMAP);
-      } else {
-        throw new IllegalArgumentException("MIME type not supported " + mimeType);
-      }
+      videoFrameProcessor.registerInputStream(
+          getInputType(checkNotNull(trackFormat.sampleMimeType)));
videoFrameProcessor.setInputFrameInfo(
new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
@@ -236,6 +232,17 @@ import org.checkerframework.dataflow.qual.Pure;
return true;
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
videoFrameProcessor.setOnInputFrameProcessedListener(listener);
}
@Override
public boolean queueInputTexture(int texId, long presentationTimeUs) {
videoFrameProcessor.queueInputTexture(texId, presentationTimeUs);
return true;
}
@Override
public Surface getInputSurface() {
return videoFrameProcessor.getInputSurface();
@@ -308,6 +315,19 @@ import org.checkerframework.dataflow.qual.Pure;
return encoderWrapper.isEnded();
}
private static @VideoFrameProcessor.InputType int getInputType(String sampleMimeType) {
if (MimeTypes.isImage(sampleMimeType)) {
return INPUT_TYPE_BITMAP;
}
if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
return INPUT_TYPE_TEXTURE_ID;
}
if (MimeTypes.isVideo(sampleMimeType)) {
return INPUT_TYPE_SURFACE;
}
throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
}
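Note the check order: MimeTypes.VIDEO_RAW is "video/raw", so it also satisfies MimeTypes.isVideo; testing it before the generic video branch is what routes formats produced by TextureAssetLoader (which sets its sample MIME type to VIDEO_RAW) to texture input. Routing by example (illustrative calls):

getInputType(MimeTypes.IMAGE_JPEG); // INPUT_TYPE_BITMAP
getInputType(MimeTypes.VIDEO_RAW);  // INPUT_TYPE_TEXTURE_ID, checked before isVideo
getInputType(MimeTypes.VIDEO_H264); // INPUT_TYPE_SURFACE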
private static Size getDecodedSize(Format format) {
// The decoder rotates encoded frames for display by format.rotationDegrees.
int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height;

View File

@@ -0,0 +1,148 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.test.utils.robolectric.RobolectricUtil.runLooperUntil;
import static com.google.common.truth.Truth.assertThat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.VideoFrameProcessor.OnInputFrameProcessedListener;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.shadows.ShadowSystemClock;
/** Unit tests for {@link TextureAssetLoader}. */
@RunWith(AndroidJUnit4.class)
public class TextureAssetLoaderTest {
@Test
public void textureAssetLoader_callsListenerCallbacksInRightOrder() throws Exception {
HandlerThread assetLoaderThread = new HandlerThread("AssetLoaderThread");
assetLoaderThread.start();
Looper assetLoaderLooper = assetLoaderThread.getLooper();
AtomicReference<Exception> exceptionRef = new AtomicReference<>();
AtomicBoolean isOutputFormatSet = new AtomicBoolean();
AssetLoader.Listener listener =
new AssetLoader.Listener() {
private volatile boolean isDurationSet;
private volatile boolean isTrackCountSet;
private volatile boolean isTrackAdded;
@Override
public void onDurationUs(long durationUs) {
// Sleep to increase the chances of the test failing if callbacks arrive out of order.
sleep();
isDurationSet = true;
}
@Override
public void onTrackCount(int trackCount) {
// Sleep to increase the chances of the test failing if callbacks arrive out of order.
sleep();
isTrackCountSet = true;
}
@Override
public boolean onTrackAdded(
Format inputFormat, @AssetLoader.SupportedOutputTypes int supportedOutputTypes) {
if (!isDurationSet) {
exceptionRef.set(
new IllegalStateException("onTrackAdded() called before onDurationUs()"));
} else if (!isTrackCountSet) {
exceptionRef.set(
new IllegalStateException("onTrackAdded() called before onTrackCount()"));
}
sleep();
isTrackAdded = true;
return false;
}
@Override
public SampleConsumer onOutputFormat(Format format) {
if (!isTrackAdded) {
exceptionRef.set(
new IllegalStateException("onOutputFormat() called before onTrackAdded()"));
}
isOutputFormatSet.set(true);
return new FakeSampleConsumer();
}
@Override
public void onError(ExportException e) {
exceptionRef.set(e);
}
private void sleep() {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
exceptionRef.set(e);
}
}
};
TextureAssetLoader assetLoader = getAssetLoader(listener);
new Handler(assetLoaderLooper).post(() -> runTextureAssetLoader(assetLoader));
runLooperUntil(
Looper.myLooper(),
() -> {
ShadowSystemClock.advanceBy(Duration.ofMillis(10));
return isOutputFormatSet.get() || exceptionRef.get() != null;
});
assertThat(exceptionRef.get()).isNull();
}
private static void runTextureAssetLoader(TextureAssetLoader assetLoader) {
assetLoader.start();
assetLoader.queueInputTexture(/* texId= */ 0, /* presentationTimeUs= */ 0);
assetLoader.signalEndOfVideoInput();
}
private static TextureAssetLoader getAssetLoader(AssetLoader.Listener listener) {
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(new MediaItem.Builder().build())
.setDurationUs(C.MICROS_PER_SECOND)
.build();
Format format = new Format.Builder().setWidth(10).setHeight(10).build();
OnInputFrameProcessedListener frameProcessedListener = unused -> {};
return new TextureAssetLoader(editedMediaItem, listener, format, frameProcessedListener);
}
private static final class FakeSampleConsumer implements SampleConsumer {
@Override
public boolean queueInputTexture(int texId, long presentationTimeUs) {
return true;
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {}
@Override
public void signalEndOfVideoInput() {}
}
}