Add RawAssetLoader and deprecate TextureAssetLoader

All the changes are based on existing implementations
and their related tests.

No new functionality is being added.

PiperOrigin-RevId: 625756836
sheenachhabra 2024-04-17 11:45:57 -07:00 committed by Copybara-Service
parent 2f8ce053b9
commit 4aa2fb883f
6 changed files with 884 additions and 8 deletions

View File: TestUtil.java

@@ -366,18 +366,17 @@ public class TestUtil {
* {@link Format} is returned.
*
* @param context The {@link Context};
- * @param filePath The media file path.
+ * @param fileUri The media file uri.
* @param trackType The {@link C.TrackType}.
* @return The {@link Format} for the given {@link C.TrackType}.
* @throws ExecutionException If an error occurred while retrieving file's metadata.
* @throws InterruptedException If interrupted while retrieving file's metadata.
*/
public static Format retrieveTrackFormat(
-Context context, String filePath, @C.TrackType int trackType)
+Context context, String fileUri, @C.TrackType int trackType)
throws ExecutionException, InterruptedException {
TrackGroupArray trackGroupArray;
-trackGroupArray =
-MetadataRetriever.retrieveMetadata(context, MediaItem.fromUri("file://" + filePath)).get();
+trackGroupArray = MetadataRetriever.retrieveMetadata(context, MediaItem.fromUri(fileUri)).get();
for (int i = 0; i < trackGroupArray.length; i++) {
TrackGroup trackGroup = trackGroupArray.get(i);
if (trackGroup.type == trackType) {

View File: RawAssetLoaderAndroidTest.java

@@ -0,0 +1,318 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.test.utils.TestUtil.buildAssetUri;
import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
import static androidx.media3.transformer.AndroidTestUtil.createOpenGlObjects;
import static androidx.media3.transformer.AndroidTestUtil.generateTextureFromBitmap;
import static com.google.common.truth.Truth.assertThat;
import android.content.Context;
import android.graphics.Bitmap;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.net.Uri;
import android.opengl.EGLContext;
import android.os.Looper;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.datasource.DataSourceBitmapLoader;
import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.effect.DefaultVideoFrameProcessor;
import androidx.media3.effect.Presentation;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
/** End-to-end instrumentation test for {@link RawAssetLoader} using {@link Transformer}. */
@RunWith(AndroidJUnit4.class)
public class RawAssetLoaderAndroidTest {
@Rule public final TestName testName = new TestName();
private final Context context = ApplicationProvider.getApplicationContext();
private String testId;
@Before
public void setUpTestId() {
testId = testName.getMethodName();
}
@Test
public void audioTranscoding_withRawAudio_completesWithCorrectDuration() throws Exception {
String rawAudioUri = "media/wav/sample.wav";
Format rawAudioFormat =
retrieveTrackFormat(context, buildAssetUri(rawAudioUri).toString(), C.TRACK_TYPE_AUDIO);
SettableFuture<RawAssetLoader> rawAssetLoaderFuture = SettableFuture.create();
Transformer transformer =
new Transformer.Builder(context)
.setAssetLoaderFactory(
new TestRawAssetLoaderFactory(
rawAudioFormat, /* videoFormat= */ null, rawAssetLoaderFuture))
.build();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY)).setDurationUs(1_000_000).build();
ListenableFuture<ExportResult> exportCompletionFuture =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.runAsync(testId, editedMediaItem);
RawAssetLoader rawAssetLoader = rawAssetLoaderFuture.get();
feedRawAudioDataToAssetLoader(rawAssetLoader, rawAudioUri);
ExportResult exportResult = exportCompletionFuture.get();
// The durationMs is the timestamp of the last sample and not the total duration.
// See b/324245196.
// Audio encoders on different API versions seem to output slightly different durations, so
// allow 50 ms of tolerance.
assertThat(exportResult.durationMs).isAtLeast(975);
assertThat(exportResult.durationMs).isAtMost(1025);
}
@Test
public void videoTranscoding_withTextureInput_completesWithCorrectFrameCountAndDuration()
throws Exception {
Bitmap bitmap =
new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(new DefaultGlObjectsProvider(createOpenGlObjects()))
.build();
Format videoFormat =
new Format.Builder().setWidth(bitmap.getWidth()).setHeight(bitmap.getHeight()).build();
SettableFuture<RawAssetLoader> rawAssetLoaderFuture = SettableFuture.create();
Transformer transformer =
new Transformer.Builder(context)
.setAssetLoaderFactory(
new TestRawAssetLoaderFactory(
/* audioFormat= */ null, videoFormat, rawAssetLoaderFuture))
.setVideoFrameProcessorFactory(videoFrameProcessorFactory)
.build();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
.setDurationUs(C.MICROS_PER_SECOND)
.build();
ListenableFuture<ExportResult> exportCompletionFuture =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.runAsync(testId, editedMediaItem);
RawAssetLoader rawAssetLoader = rawAssetLoaderFuture.get();
int firstTextureId = generateTextureFromBitmap(bitmap);
int secondTextureId = generateTextureFromBitmap(bitmap);
long lastSampleTimestampUs = C.MICROS_PER_SECOND / 2;
while (!rawAssetLoader.queueInputTexture(firstTextureId, /* presentationTimeUs= */ 0)) {}
while (!rawAssetLoader.queueInputTexture(secondTextureId, lastSampleTimestampUs)) {}
rawAssetLoader.signalEndOfVideoInput();
ExportResult exportResult = exportCompletionFuture.get();
assertThat(exportResult.videoFrameCount).isEqualTo(2);
// The durationMs is the timestamp of the last sample and not the total duration.
// See b/324245196.
assertThat(exportResult.durationMs).isEqualTo(lastSampleTimestampUs / 1_000);
}
@Test
public void videoEditing_withTextureInput_completesWithCorrectFrameCountAndDuration()
throws Exception {
Bitmap bitmap =
new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
EGLContext currentContext = createOpenGlObjects();
DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(new DefaultGlObjectsProvider(currentContext))
.build();
Format videoFormat =
new Format.Builder().setWidth(bitmap.getWidth()).setHeight(bitmap.getHeight()).build();
SettableFuture<RawAssetLoader> rawAssetLoaderFuture = SettableFuture.create();
Transformer transformer =
new Transformer.Builder(context)
.setAssetLoaderFactory(
new TestRawAssetLoaderFactory(
/* audioFormat= */ null, videoFormat, rawAssetLoaderFuture))
.setVideoFrameProcessorFactory(videoFrameProcessorFactory)
.build();
ImmutableList<Effect> videoEffects = ImmutableList.of(Presentation.createForHeight(480));
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
.setDurationUs(C.MICROS_PER_SECOND)
.setEffects(new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects))
.build();
ListenableFuture<ExportResult> exportCompletionFuture =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.runAsync(testId, editedMediaItem);
RawAssetLoader rawAssetLoader = rawAssetLoaderFuture.get();
int firstTextureId = generateTextureFromBitmap(bitmap);
int secondTextureId = generateTextureFromBitmap(bitmap);
long lastSampleTimestampUs = C.MICROS_PER_SECOND / 2;
while (!rawAssetLoader.queueInputTexture(firstTextureId, /* presentationTimeUs= */ 0)) {}
while (!rawAssetLoader.queueInputTexture(secondTextureId, lastSampleTimestampUs)) {}
rawAssetLoader.signalEndOfVideoInput();
ExportResult exportResult = exportCompletionFuture.get();
assertThat(exportResult.videoFrameCount).isEqualTo(2);
// The durationMs is the timestamp of the last sample and not the total duration.
// See b/324245196.
assertThat(exportResult.durationMs).isEqualTo(lastSampleTimestampUs / 1_000);
}
@Test
public void audioAndVideoTranscoding_withRawData_completesWithCorrectFrameCountAndDuration()
throws Exception {
String rawAudioUri = "media/wav/sample.wav";
Format audioFormat =
retrieveTrackFormat(context, buildAssetUri(rawAudioUri).toString(), C.TRACK_TYPE_AUDIO);
Bitmap bitmap =
new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
new DefaultVideoFrameProcessor.Factory.Builder()
.setGlObjectsProvider(new DefaultGlObjectsProvider(createOpenGlObjects()))
.build();
Format videoFormat =
new Format.Builder().setWidth(bitmap.getWidth()).setHeight(bitmap.getHeight()).build();
SettableFuture<RawAssetLoader> rawAssetLoaderFuture = SettableFuture.create();
Transformer transformer =
new Transformer.Builder(context)
.setAssetLoaderFactory(
new TestRawAssetLoaderFactory(audioFormat, videoFormat, rawAssetLoaderFuture))
.setVideoFrameProcessorFactory(videoFrameProcessorFactory)
.build();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
.setDurationUs(C.MICROS_PER_SECOND)
.build();
ListenableFuture<ExportResult> exportCompletionFuture =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.runAsync(testId, editedMediaItem);
RawAssetLoader rawAssetLoader = rawAssetLoaderFuture.get();
int firstTextureId = generateTextureFromBitmap(bitmap);
int secondTextureId = generateTextureFromBitmap(bitmap);
// Feed audio and video data in parallel so that export is not blocked waiting for all the
// tracks.
new Thread(
() -> {
// Queue raw audio data.
try {
feedRawAudioDataToAssetLoader(rawAssetLoader, rawAudioUri);
} catch (IOException e) {
throw new RuntimeException(e);
}
})
.start();
// Queue raw video data.
while (!rawAssetLoader.queueInputTexture(firstTextureId, /* presentationTimeUs= */ 0)) {}
while (!rawAssetLoader.queueInputTexture(
secondTextureId, /* presentationTimeUs= */ C.MICROS_PER_SECOND / 2)) {}
rawAssetLoader.signalEndOfVideoInput();
ExportResult exportResult = exportCompletionFuture.get();
assertThat(exportResult.videoFrameCount).isEqualTo(2);
// The durationMs is the timestamp of the last audio sample and not the total duration.
// See b/324245196.
// Audio encoders on different API versions seem to output slightly different durations, so
// allow 50 ms of tolerance.
assertThat(exportResult.durationMs).isAtLeast(975);
assertThat(exportResult.durationMs).isAtMost(1025);
}
private void feedRawAudioDataToAssetLoader(RawAssetLoader rawAssetLoader, String audioAssetUri)
throws IOException {
// TODO: b/270695884 - Use media3 extractor to extract the samples.
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(context.getResources().getAssets().openFd(audioAssetUri));
// The audio-only file should have exactly one track.
MediaFormat audioFormat = extractor.getTrackFormat(0);
checkState(MimeTypes.isAudio(audioFormat.getString(MediaFormat.KEY_MIME)));
extractor.selectTrack(0);
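// Buffer capacity assumed to be large enough for any sample in this asset.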
int maxSampleSize = 34_000;
do {
long samplePresentationTimeUs = extractor.getSampleTime();
ByteBuffer sampleBuffer = ByteBuffer.allocateDirect(maxSampleSize);
if (extractor.readSampleData(sampleBuffer, /* offset= */ 0) == -1) {
break;
}
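// queueAudioData returns false while the pipeline is not yet ready; poll until the sample is
// accepted.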
while (true) {
if (rawAssetLoader.queueAudioData(
sampleBuffer, samplePresentationTimeUs, /* isLast= */ false)) {
break;
}
}
} while (extractor.advance());
extractor.release();
checkState(rawAssetLoader.queueAudioData(ByteBuffer.allocate(0), /* presentationTimeUs= */ 0, /* isLast= */ true));
}
private static final class TestRawAssetLoaderFactory implements AssetLoader.Factory {
private final Format audioFormat;
private final Format videoFormat;
private final SettableFuture<RawAssetLoader> assetLoaderSettableFuture;
public TestRawAssetLoaderFactory(
@Nullable Format audioFormat,
@Nullable Format videoFormat,
SettableFuture<RawAssetLoader> assetLoaderSettableFuture) {
this.audioFormat = audioFormat;
this.videoFormat = videoFormat;
this.assetLoaderSettableFuture = assetLoaderSettableFuture;
}
@Override
public RawAssetLoader createAssetLoader(
EditedMediaItem editedMediaItem,
Looper looper,
AssetLoader.Listener listener,
AssetLoader.CompositionSettings compositionSettings) {
OnInputFrameProcessedListener frameProcessedListener =
(texId, syncObject) -> {
try {
GlUtil.deleteTexture(texId);
GlUtil.deleteSyncObject(syncObject);
} catch (GlUtil.GlException e) {
throw new VideoFrameProcessingException(e);
}
};
RawAssetLoader rawAssetLoader =
new RawAssetLoader(
editedMediaItem, listener, audioFormat, videoFormat, frameProcessedListener);
assetLoaderSettableFuture.set(rawAssetLoader);
return rawAssetLoader;
}
}
}

View File: TransformerAndroidTestRunner.java

@@ -37,6 +37,8 @@ import androidx.media3.effect.DebugTraceUtil;
import androidx.media3.test.utils.SsimHelper;
import androidx.test.platform.app.InstrumentationRegistry;
import com.google.common.base.Ascii;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.File;
import java.io.IOException;
@@ -179,6 +181,35 @@ public class TransformerAndroidTestRunner {
this.inputValues = inputValues;
}
/**
* Exports the {@link EditedMediaItem} asynchronously.
*
* @param testId A unique identifier for the transformer test run.
* @param editedMediaItem The {@link EditedMediaItem} to export.
* @return A {@link ListenableFuture} that completes with the {@link ExportResult} on success, or
*     fails with the {@link ExportException} on error.
* @throws IOException If creating the output file fails.
*/
public ListenableFuture<ExportResult> runAsync(String testId, EditedMediaItem editedMediaItem)
throws IOException {
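// Relay the terminal Transformer callbacks into the returned future.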
SettableFuture<ExportResult> completionFuture = SettableFuture.create();
File outputVideoFile = createOutputFile(testId);
InstrumentationRegistry.getInstrumentation()
.runOnMainSync(
() -> {
transformer.addListener(
new Transformer.Listener() {
@Override
public void onCompleted(Composition composition, ExportResult exportResult) {
completionFuture.set(exportResult);
}
@Override
public void onError(
Composition composition,
ExportResult exportResult,
ExportException exportException) {
completionFuture.setException(exportException);
}
});
transformer.start(editedMediaItem, outputVideoFile.getAbsolutePath());
});
return completionFuture;
}
/**
* Exports the {@link Composition}, saving a summary of the export to the application cache.
*
@@ -358,10 +389,7 @@ public class TransformerAndroidTestRunner {
})
.build();
-File outputVideoFile =
-AndroidTestUtil.createExternalCacheFile(
-context,
-/* fileName= */ testId + "-" + Clock.DEFAULT.elapsedRealtime() + "-output.mp4");
+File outputVideoFile = createOutputFile(testId);
InstrumentationRegistry.getInstrumentation()
.runOnMainSync(
() -> {
@@ -455,6 +483,11 @@ public class TransformerAndroidTestRunner {
return testResultBuilder.build();
}
private File createOutputFile(String testId) throws IOException {
return AndroidTestUtil.createExternalCacheFile(
context, /* fileName= */ testId + "-" + Clock.DEFAULT.elapsedRealtime() + "-output.mp4");
}
/** Returns whether the context is connected to the network. */
private static boolean hasNetworkConnection(Context context) {
ConnectivityManager connectivityManager =

View File: RawAssetLoader.java

@@ -0,0 +1,272 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.transformer.ExportException.ERROR_CODE_UNSPECIFIED;
import static androidx.media3.transformer.SampleConsumer.INPUT_RESULT_END_OF_STREAM;
import static androidx.media3.transformer.SampleConsumer.INPUT_RESULT_TRY_AGAIN_LATER;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_NOT_STARTED;
import static androidx.media3.transformer.TransformerUtil.getValidColor;
import static java.lang.Math.min;
import static java.lang.Math.round;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.decoder.DecoderInputBuffer;
import com.google.common.collect.ImmutableMap;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* An {@link AssetLoader} implementation that loads raw audio and/or video data.
*
* <p>Typically instantiated in a custom {@link AssetLoader.Factory} saving a reference to the
* created {@link RawAssetLoader}.
*
* <p>Provide raw audio data as input by calling {@link #queueAudioData}. This method must always be
* called from the same thread, which can be any thread.
*
* <p>Provide video frames as input by calling {@link #queueInputTexture}, then {@link
* #signalEndOfVideoInput() signal the end of input} when finished. These two methods must be called
* from the same thread, which can be any thread.
*
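* <p>A minimal end-to-end sketch, modeled on this commit's tests (variable names are
* illustrative):
*
* <pre>{@code
* RawAssetLoader rawAssetLoader = rawAssetLoaderFuture.get(); // Set by a custom Factory.
* while (!rawAssetLoader.queueAudioData(audioData, audioPresentationTimeUs, false)) {}
* while (!rawAssetLoader.queueInputTexture(texId, videoPresentationTimeUs)) {}
* rawAssetLoader.signalEndOfVideoInput();
* }</pre>
*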
* <p>All other methods are for internal use only and should never be called by the app.
*/
@UnstableApi
public final class RawAssetLoader implements AssetLoader {
private final EditedMediaItem editedMediaItem;
private final Listener assetLoaderListener;
private final @MonotonicNonNull Format audioFormat;
private final @MonotonicNonNull Format videoFormat;
private final @MonotonicNonNull OnInputFrameProcessedListener frameProcessedListener;
private @MonotonicNonNull SampleConsumer audioSampleConsumer;
private @MonotonicNonNull SampleConsumer videoSampleConsumer;
private @Transformer.ProgressState int progressState;
private boolean isVideoTrackAdded;
private boolean isAudioTrackAdded;
private boolean isAudioEndOfStreamSignaled;
private boolean isVideoEndOfStreamSignaled;
// Read on app's thread and written on internal thread.
private volatile boolean isStarted;
// Read on internal thread and written on app's thread.
private volatile long lastQueuedAudioPresentationTimeUs;
// Read on internal thread and written on app's thread.
private volatile long lastQueuedVideoPresentationTimeUs;
/**
* Creates an instance.
*
* @param editedMediaItem The {@link EditedMediaItem} for which raw data is provided. The {@link
* EditedMediaItem#durationUs} must be set.
* @param assetLoaderListener Listener for asset loading events.
* @param audioFormat The audio format, or {@code null} if only video data is provided.
* @param videoFormat The video format, or {@code null} if only audio data is provided. The {@link
* Format#width} and the {@link Format#height} must be set.
* @param frameProcessedListener Listener for the event when a frame has been processed, or
*     {@code null} if only audio data is provided. The listener receives a GL sync object (if
*     supported) so that the texture can safely be reused once the frame is no longer in use.
*/
public RawAssetLoader(
EditedMediaItem editedMediaItem,
Listener assetLoaderListener,
@Nullable Format audioFormat,
@Nullable Format videoFormat,
@Nullable OnInputFrameProcessedListener frameProcessedListener) {
checkArgument(audioFormat != null || videoFormat != null);
checkArgument(editedMediaItem.durationUs != C.TIME_UNSET);
checkArgument(
videoFormat == null
|| (videoFormat.height != Format.NO_VALUE && videoFormat.width != Format.NO_VALUE));
this.editedMediaItem = editedMediaItem;
this.assetLoaderListener = assetLoaderListener;
this.audioFormat = audioFormat;
this.videoFormat =
videoFormat != null
? videoFormat
.buildUpon()
.setColorInfo(getValidColor(videoFormat.colorInfo))
.setSampleMimeType(MimeTypes.VIDEO_RAW)
.build()
: null;
this.frameProcessedListener = frameProcessedListener;
progressState = PROGRESS_STATE_NOT_STARTED;
lastQueuedAudioPresentationTimeUs = Long.MAX_VALUE;
lastQueuedVideoPresentationTimeUs = Long.MAX_VALUE;
}
@Override
public void start() {
progressState = PROGRESS_STATE_AVAILABLE;
assetLoaderListener.onDurationUs(editedMediaItem.durationUs);
int trackCount = 1;
if (audioFormat != null && videoFormat != null) {
trackCount = 2;
}
assetLoaderListener.onTrackCount(trackCount);
isStarted = true;
}
@Override
public @Transformer.ProgressState int getProgress(ProgressHolder progressHolder) {
if (progressState == PROGRESS_STATE_AVAILABLE) {
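// Progress is based on the smaller of the last queued audio and video timestamps; both are
// initialized to Long.MAX_VALUE, so MAX_VALUE here means nothing has been queued yet.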
long lastTimestampUs =
min(lastQueuedAudioPresentationTimeUs, lastQueuedVideoPresentationTimeUs);
if (lastTimestampUs == Long.MAX_VALUE) {
lastTimestampUs = 0;
}
progressHolder.progress = round((lastTimestampUs / (float) editedMediaItem.durationUs) * 100);
}
return progressState;
}
@Override
public ImmutableMap<Integer, String> getDecoderNames() {
return ImmutableMap.of();
}
@Override
public void release() {
progressState = PROGRESS_STATE_NOT_STARTED;
}
/**
* Attempts to provide an input texture.
*
* <p>Must be called on the same thread as {@link #signalEndOfVideoInput}.
*
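* <p>Since a {@code false} return value means "try again later", this commit's tests simply poll:
*
* <pre>{@code
* while (!rawAssetLoader.queueInputTexture(texId, presentationTimeUs)) {}
* }</pre>
*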
* @param texId The ID of the texture to queue.
* @param presentationTimeUs The presentation time for the texture, in microseconds.
* @return Whether the texture was successfully queued. If {@code false}, the caller should try
* again later.
*/
public boolean queueInputTexture(int texId, long presentationTimeUs) {
checkState(!isVideoEndOfStreamSignaled);
try {
if (!isVideoTrackAdded) {
if (!isStarted) {
return false;
}
assetLoaderListener.onTrackAdded(checkNotNull(videoFormat), SUPPORTED_OUTPUT_TYPE_DECODED);
isVideoTrackAdded = true;
}
if (videoSampleConsumer == null) {
@Nullable
SampleConsumer sampleConsumer =
assetLoaderListener.onOutputFormat(checkNotNull(videoFormat));
if (sampleConsumer == null) {
return false;
} else {
videoSampleConsumer = sampleConsumer;
sampleConsumer.setOnInputFrameProcessedListener(checkNotNull(frameProcessedListener));
}
}
@SampleConsumer.InputResult
int result = videoSampleConsumer.queueInputTexture(texId, presentationTimeUs);
if (result == INPUT_RESULT_TRY_AGAIN_LATER) {
return false;
}
if (result == INPUT_RESULT_END_OF_STREAM) {
isVideoEndOfStreamSignaled = true;
}
lastQueuedVideoPresentationTimeUs = presentationTimeUs;
return true;
} catch (ExportException e) {
assetLoaderListener.onError(e);
} catch (RuntimeException e) {
assetLoaderListener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED));
}
return false;
}
/**
* Signals that no further input frames will be rendered.
*
* <p>Must be called on the same thread as {@link #queueInputTexture}.
*/
public void signalEndOfVideoInput() {
try {
if (!isVideoEndOfStreamSignaled) {
isVideoEndOfStreamSignaled = true;
checkNotNull(videoSampleConsumer).signalEndOfVideoInput();
}
} catch (RuntimeException e) {
assetLoaderListener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED));
}
}
/**
* Attempts to provide raw audio data.
*
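* <p>The end of the audio stream is signaled by queueing a buffer with {@code isLast} set, as this
* commit's tests do with an empty buffer once all samples are queued (names illustrative):
*
* <pre>{@code
* rawAssetLoader.queueAudioData(ByteBuffer.allocate(0), lastPresentationTimeUs, true);
* }</pre>
*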
* @param audioData The raw audio data. The {@link ByteBuffer} can be reused after calling this
* method.
* @param presentationTimeUs The presentation time for the raw audio data, in microseconds.
* @param isLast Whether this is the last audio data to be queued.
* @return Whether the raw audio data was successfully queued. If {@code false}, the caller should
* try again later.
*/
public boolean queueAudioData(ByteBuffer audioData, long presentationTimeUs, boolean isLast) {
checkState(!isAudioEndOfStreamSignaled);
if (!isStarted) {
return false;
}
try {
if (!isAudioTrackAdded) {
assetLoaderListener.onTrackAdded(checkNotNull(audioFormat), SUPPORTED_OUTPUT_TYPE_DECODED);
isAudioTrackAdded = true;
}
if (audioSampleConsumer == null) {
@Nullable
SampleConsumer sampleConsumer =
assetLoaderListener.onOutputFormat(checkNotNull(audioFormat));
if (sampleConsumer == null) {
return false;
} else {
audioSampleConsumer = sampleConsumer;
}
}
DecoderInputBuffer decoderInputBuffer = audioSampleConsumer.getInputBuffer();
if (decoderInputBuffer == null) {
return false;
}
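// Copy the data so that the caller can reuse the audioData buffer, as documented above.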
decoderInputBuffer.ensureSpaceForWrite(audioData.remaining());
decoderInputBuffer.data.put(audioData).flip();
if (isLast) {
decoderInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
}
if (audioSampleConsumer.queueInputBuffer()) {
lastQueuedAudioPresentationTimeUs = presentationTimeUs;
isAudioEndOfStreamSignaled = isLast;
return true;
}
} catch (ExportException e) {
assetLoaderListener.onError(e);
} catch (RuntimeException e) {
assetLoaderListener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED));
}
return false;
}
}

View File: TextureAssetLoader.java

@@ -43,7 +43,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* created {@link TextureAssetLoader}. Provide video frames as input by calling {@link
* #queueInputTexture}, then {@link #signalEndOfVideoInput() signal the end of input} when finished.
* Those methods must be called from the same thread, which can be any thread.
*
* @deprecated Use {@link RawAssetLoader}.
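*     For texture input, a minimal migration sketch (surrounding variables assumed) constructs
*     the replacement with a {@code null} audio format:
*     <pre>{@code
*     new RawAssetLoader(editedMediaItem, listener, null, videoFormat, frameProcessedListener);
*     }</pre>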
*/
@Deprecated
@UnstableApi
public final class TextureAssetLoader implements AssetLoader {
private final EditedMediaItem editedMediaItem;

View File: RawAssetLoaderTest.java

@@ -0,0 +1,251 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static com.google.common.truth.Truth.assertThat;
import static java.lang.Math.min;
import static java.lang.Math.round;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.test.utils.TestUtil;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.nio.ByteBuffer;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit tests for {@link RawAssetLoader}. */
@RunWith(AndroidJUnit4.class)
public class RawAssetLoaderTest {
private static final Format FAKE_AUDIO_FORMAT =
new Format.Builder()
.setSampleRate(48000)
.setChannelCount(2)
.setPcmEncoding(C.ENCODING_PCM_16BIT)
.build();
private static final Format FAKE_VIDEO_FORMAT =
new Format.Builder().setWidth(10).setHeight(10).build();
private static final byte[] FAKE_AUDIO_DATA = TestUtil.createByteArray(1, 2, 3, 4);
@Test
public void rawAssetLoader_withOnlyAudioData_successfullyQueuesAudioData() {
long audioDurationUs = 1_000;
FakeAudioSampleConsumer fakeAudioSampleConsumer = new FakeAudioSampleConsumer();
AssetLoader.Listener fakeAssetLoaderListener =
new FakeAssetLoaderListener(fakeAudioSampleConsumer, /* videoSampleConsumer= */ null);
RawAssetLoader rawAssetLoader =
new RawAssetLoader(
getEditedMediaItem(audioDurationUs),
fakeAssetLoaderListener,
FAKE_AUDIO_FORMAT,
/* videoFormat= */ null,
/* frameProcessedListener= */ null);
rawAssetLoader.start();
boolean queuedAudioData =
rawAssetLoader.queueAudioData(
ByteBuffer.wrap(FAKE_AUDIO_DATA), /* presentationTimeUs= */ 100, /* isLast= */ false);
assertThat(queuedAudioData).isTrue();
assertThat(fakeAudioSampleConsumer.inputBufferQueued).isTrue();
}
@Test
public void rawAssetLoader_withOnlyVideoData_successfullyQueuesInputTexture() {
long videoDurationUs = 1_000;
FakeVideoSampleConsumer fakeVideoSampleConsumer = new FakeVideoSampleConsumer();
AssetLoader.Listener fakeAssetLoaderListener =
new FakeAssetLoaderListener(/* audioSampleConsumer= */ null, fakeVideoSampleConsumer);
RawAssetLoader rawAssetLoader =
new RawAssetLoader(
getEditedMediaItem(videoDurationUs),
fakeAssetLoaderListener,
/* audioFormat= */ null,
FAKE_VIDEO_FORMAT,
/* frameProcessedListener= */ (unused, unused2) -> {});
rawAssetLoader.start();
boolean queuedInputTexture =
rawAssetLoader.queueInputTexture(/* texId= */ 0, /* presentationTimeUs= */ 0);
rawAssetLoader.signalEndOfVideoInput();
assertThat(queuedInputTexture).isTrue();
assertThat(fakeVideoSampleConsumer.inputTextureQueued).isTrue();
}
@Test
public void getProgress_withOnlyAudioData_returnsExpectedProgress() {
long audioDurationUs = 1_000;
long audioSamplePresentationTimeUs = 100;
AssetLoader.Listener fakeAssetLoaderListener =
new FakeAssetLoaderListener(new FakeAudioSampleConsumer(), /* videoSampleConsumer= */ null);
ProgressHolder progressHolder = new ProgressHolder();
RawAssetLoader rawAssetLoader =
new RawAssetLoader(
getEditedMediaItem(audioDurationUs),
fakeAssetLoaderListener,
FAKE_AUDIO_FORMAT,
/* videoFormat= */ null,
/* frameProcessedListener= */ null);
rawAssetLoader.start();
boolean queuedAudioData =
rawAssetLoader.queueAudioData(
ByteBuffer.wrap(FAKE_AUDIO_DATA), audioSamplePresentationTimeUs, /* isLast= */ false);
@Transformer.ProgressState int progressState = rawAssetLoader.getProgress(progressHolder);
assertThat(queuedAudioData).isTrue();
assertThat(progressState).isEqualTo(PROGRESS_STATE_AVAILABLE);
assertThat(progressHolder.progress)
.isEqualTo(round(audioSamplePresentationTimeUs * 100 / (float) audioDurationUs));
}
@Test
public void getProgress_withOnlyVideoData_returnsExpectedProgress() throws ExportException {
long videoDurationUs = 1_000;
long videoSamplePresentationTimeUs = 100;
AssetLoader.Listener fakeAssetLoaderListener =
new FakeAssetLoaderListener(/* audioSampleConsumer= */ null, new FakeVideoSampleConsumer());
ProgressHolder progressHolder = new ProgressHolder();
RawAssetLoader rawAssetLoader =
new RawAssetLoader(
getEditedMediaItem(videoDurationUs),
fakeAssetLoaderListener,
/* audioFormat= */ null,
FAKE_VIDEO_FORMAT,
/* frameProcessedListener= */ (unused, unused2) -> {});
rawAssetLoader.start();
boolean queuedInputTexture =
rawAssetLoader.queueInputTexture(/* texId= */ 0, videoSamplePresentationTimeUs);
@Transformer.ProgressState int progressState = rawAssetLoader.getProgress(progressHolder);
assertThat(queuedInputTexture).isTrue();
assertThat(progressState).isEqualTo(PROGRESS_STATE_AVAILABLE);
assertThat(progressHolder.progress)
.isEqualTo(round(videoSamplePresentationTimeUs * 100 / (float) videoDurationUs));
}
@Test
public void getProgress_withBothAudioAndVideoData_returnsMinimumProgress() {
long mediaDurationUs = 1_000;
long audioSamplePresentationTimeUs = 100;
long videoSamplePresentationTimeUs = 500;
AssetLoader.Listener fakeAssetLoaderListener =
new FakeAssetLoaderListener(new FakeAudioSampleConsumer(), new FakeVideoSampleConsumer());
ProgressHolder progressHolder = new ProgressHolder();
RawAssetLoader rawAssetLoader =
new RawAssetLoader(
getEditedMediaItem(mediaDurationUs),
fakeAssetLoaderListener,
FAKE_AUDIO_FORMAT,
FAKE_VIDEO_FORMAT,
/* frameProcessedListener= */ (unused, unused2) -> {});
rawAssetLoader.start();
boolean queuedAudioData =
rawAssetLoader.queueAudioData(
ByteBuffer.wrap(FAKE_AUDIO_DATA), audioSamplePresentationTimeUs, /* isLast= */ false);
boolean queuedInputTexture =
rawAssetLoader.queueInputTexture(/* texId= */ 0, videoSamplePresentationTimeUs);
@Transformer.ProgressState int progressState = rawAssetLoader.getProgress(progressHolder);
assertThat(queuedAudioData).isTrue();
assertThat(queuedInputTexture).isTrue();
assertThat(progressState).isEqualTo(PROGRESS_STATE_AVAILABLE);
assertThat(progressHolder.progress)
.isEqualTo(
round(
min(audioSamplePresentationTimeUs, videoSamplePresentationTimeUs)
* 100
/ (float) mediaDurationUs));
}
private static EditedMediaItem getEditedMediaItem(long mediaDurationUs) {
return new EditedMediaItem.Builder(new MediaItem.Builder().build())
.setDurationUs(mediaDurationUs)
.build();
}
private static class FakeAssetLoaderListener implements AssetLoader.Listener {
@Nullable private final SampleConsumer audioSampleConsumer;
@Nullable private final SampleConsumer videoSampleConsumer;
public FakeAssetLoaderListener(
@Nullable SampleConsumer audioSampleConsumer,
@Nullable SampleConsumer videoSampleConsumer) {
this.audioSampleConsumer = audioSampleConsumer;
this.videoSampleConsumer = videoSampleConsumer;
}
@Override
public void onDurationUs(long durationUs) {}
@Override
public void onTrackCount(int trackCount) {}
@Override
public boolean onTrackAdded(
Format inputFormat, @AssetLoader.SupportedOutputTypes int supportedOutputTypes) {
return true;
}
@Nullable
@Override
public SampleConsumer onOutputFormat(Format format) {
return MimeTypes.isVideo(format.sampleMimeType) ? videoSampleConsumer : audioSampleConsumer;
}
@Override
public void onError(ExportException exportException) {}
}
private static class FakeAudioSampleConsumer implements SampleConsumer {
public boolean inputBufferQueued;
@Override
public DecoderInputBuffer getInputBuffer() {
return new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL);
}
@Override
public boolean queueInputBuffer() {
inputBufferQueued = true;
return true;
}
}
private static class FakeVideoSampleConsumer implements SampleConsumer {
public boolean inputTextureQueued;
@Override
public @InputResult int queueInputTexture(int texId, long presentationTimeUs) {
inputTextureQueued = true;
return INPUT_RESULT_SUCCESS;
}
@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {}
}
}