diff --git a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/RawAssetLoaderAndroidTest.java b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/RawAssetLoaderAndroidTest.java
index 6eb31b6f40..e2fbdad1ea 100644
--- a/libraries/transformer/src/androidTest/java/androidx/media3/transformer/RawAssetLoaderAndroidTest.java
+++ b/libraries/transformer/src/androidTest/java/androidx/media3/transformer/RawAssetLoaderAndroidTest.java
@@ -15,9 +15,6 @@
  */
 package androidx.media3.transformer;
 
-import static androidx.media3.common.util.Assertions.checkState;
-import static androidx.media3.test.utils.TestUtil.buildAssetUri;
-import static androidx.media3.test.utils.TestUtil.retrieveTrackFormat;
 import static androidx.media3.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
 import static androidx.media3.transformer.AndroidTestUtil.createOpenGlObjects;
 import static androidx.media3.transformer.AndroidTestUtil.generateTextureFromBitmap;
@@ -25,8 +22,6 @@ import static com.google.common.truth.Truth.assertThat;
 
 import android.content.Context;
 import android.graphics.Bitmap;
-import android.media.MediaExtractor;
-import android.media.MediaFormat;
 import android.net.Uri;
 import android.opengl.EGLContext;
 import android.os.Looper;
@@ -38,7 +33,9 @@ import androidx.media3.common.MediaItem;
 import androidx.media3.common.MimeTypes;
 import androidx.media3.common.OnInputFrameProcessedListener;
 import androidx.media3.common.VideoFrameProcessingException;
+import androidx.media3.common.audio.AudioProcessor.AudioFormat;
 import androidx.media3.common.util.GlUtil;
+import androidx.media3.common.util.Util;
 import androidx.media3.datasource.DataSourceBitmapLoader;
 import androidx.media3.effect.DefaultGlObjectsProvider;
 import androidx.media3.effect.DefaultVideoFrameProcessor;
@@ -48,7 +45,6 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
 import com.google.common.collect.ImmutableList;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.SettableFuture;
-import java.io.IOException;
 import java.nio.ByteBuffer;
 import org.junit.Before;
 import org.junit.Rule;
@@ -61,6 +57,14 @@ import org.junit.runner.RunWith;
 public class RawAssetLoaderAndroidTest {
   @Rule public final TestName testName = new TestName();
 
+  private static final Format AUDIO_FORMAT =
+      new Format.Builder()
+          .setSampleMimeType(MimeTypes.AUDIO_RAW)
+          .setSampleRate(44_100)
+          .setChannelCount(1)
+          .setPcmEncoding(C.ENCODING_PCM_16BIT)
+          .build();
+
   private final Context context = ApplicationProvider.getApplicationContext();
   private String testId;
 
@@ -72,25 +76,25 @@ public class RawAssetLoaderAndroidTest {
 
   @Test
   public void audioTranscoding_withRawAudio_completesWithCorrectDuration() throws Exception {
-    String rawAudioUri = "media/wav/sample.wav";
-    Format rawAudioFormat =
-        retrieveTrackFormat(context, buildAssetUri(rawAudioUri).toString(), C.TRACK_TYPE_AUDIO);
     SettableFuture<RawAssetLoader> rawAssetLoaderFuture = SettableFuture.create();
     Transformer transformer =
         new Transformer.Builder(context)
             .setAssetLoaderFactory(
                 new TestRawAssetLoaderFactory(
-                    rawAudioFormat, /* videoFormat= */ null, rawAssetLoaderFuture))
+                    AUDIO_FORMAT, /* videoFormat= */ null, rawAssetLoaderFuture))
             .build();
+    long mediaDurationUs = C.MICROS_PER_SECOND;
     EditedMediaItem editedMediaItem =
-        new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY)).setDurationUs(1_000_000).build();
+        new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
+            .setDurationUs(mediaDurationUs)
+            .build();
     ListenableFuture<ExportResult> exportCompletionFuture =
         new TransformerAndroidTestRunner.Builder(context, transformer)
             .build()
             .runAsync(testId, editedMediaItem);
 
     RawAssetLoader rawAssetLoader = rawAssetLoaderFuture.get();
-    feedRawAudioDataToAssetLoader(rawAssetLoader, rawAudioUri);
+    feedRawAudioDataToAssetLoader(rawAssetLoader, AUDIO_FORMAT, mediaDurationUs);
 
     ExportResult exportResult = exportCompletionFuture.get();
     // The durationMs is the timestamp of the last sample and not the total duration.
@@ -120,9 +124,10 @@
                     /* audioFormat= */ null, videoFormat, rawAssetLoaderFuture))
             .setVideoFrameProcessorFactory(videoFrameProcessorFactory)
             .build();
+    long mediaDurationUs = C.MICROS_PER_SECOND;
     EditedMediaItem editedMediaItem =
         new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
-            .setDurationUs(C.MICROS_PER_SECOND)
+            .setDurationUs(mediaDurationUs)
             .build();
     ListenableFuture<ExportResult> exportCompletionFuture =
         new TransformerAndroidTestRunner.Builder(context, transformer)
@@ -132,7 +137,7 @@
     RawAssetLoader rawAssetLoader = rawAssetLoaderFuture.get();
     int firstTextureId = generateTextureFromBitmap(bitmap);
     int secondTextureId = generateTextureFromBitmap(bitmap);
-    long lastSampleTimestampUs = C.MICROS_PER_SECOND / 2;
+    long lastSampleTimestampUs = mediaDurationUs / 2;
     while (!rawAssetLoader.queueInputTexture(firstTextureId, /* presentationTimeUs= */ 0)) {}
     while (!rawAssetLoader.queueInputTexture(secondTextureId, lastSampleTimestampUs)) {}
     rawAssetLoader.signalEndOfVideoInput();
@@ -165,9 +170,10 @@
             .setVideoFrameProcessorFactory(videoFrameProcessorFactory)
             .build();
     ImmutableList<Effect> videoEffects = ImmutableList.of(Presentation.createForHeight(480));
+    long mediaDurationUs = C.MICROS_PER_SECOND;
     EditedMediaItem editedMediaItem =
         new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
-            .setDurationUs(C.MICROS_PER_SECOND)
+            .setDurationUs(mediaDurationUs)
             .setEffects(new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects))
             .build();
     ListenableFuture<ExportResult> exportCompletionFuture =
@@ -178,7 +184,7 @@
     RawAssetLoader rawAssetLoader = rawAssetLoaderFuture.get();
     int firstTextureId = generateTextureFromBitmap(bitmap);
     int secondTextureId = generateTextureFromBitmap(bitmap);
-    long lastSampleTimestampUs = C.MICROS_PER_SECOND / 2;
+    long lastSampleTimestampUs = mediaDurationUs / 2;
     while (!rawAssetLoader.queueInputTexture(firstTextureId, /* presentationTimeUs= */ 0)) {}
     while (!rawAssetLoader.queueInputTexture(secondTextureId, lastSampleTimestampUs)) {}
     rawAssetLoader.signalEndOfVideoInput();
@@ -193,9 +199,6 @@
   @Test
   public void audioAndVideoTranscoding_withRawData_completesWithCorrectFrameCountAndDuration()
       throws Exception {
-    String rawAudioUri = "media/wav/sample.wav";
-    Format audioFormat =
-        retrieveTrackFormat(context, buildAssetUri(rawAudioUri).toString(), C.TRACK_TYPE_AUDIO);
     Bitmap bitmap =
         new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
     DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
@@ -208,12 +211,13 @@
     Transformer transformer =
         new Transformer.Builder(context)
             .setAssetLoaderFactory(
-                new TestRawAssetLoaderFactory(audioFormat, videoFormat, rawAssetLoaderFuture))
+                new TestRawAssetLoaderFactory(AUDIO_FORMAT, videoFormat, rawAssetLoaderFuture))
            .setVideoFrameProcessorFactory(videoFrameProcessorFactory)
            .build();
+    long mediaDurationUs = C.MICROS_PER_SECOND;
     EditedMediaItem editedMediaItem =
         new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
-            .setDurationUs(C.MICROS_PER_SECOND)
+            .setDurationUs(mediaDurationUs)
             .build();
     ListenableFuture<ExportResult> exportCompletionFuture =
         new TransformerAndroidTestRunner.Builder(context, transformer)
@@ -226,19 +230,13 @@
     // Feed audio and video data in parallel so that export is not blocked waiting for all the
     // tracks.
     new Thread(
-            () -> {
-              // Queue raw audio data.
-              try {
-                feedRawAudioDataToAssetLoader(rawAssetLoader, rawAudioUri);
-              } catch (IOException e) {
-                throw new RuntimeException(e);
-              }
-            })
+            () -> // Queue raw audio data.
+                feedRawAudioDataToAssetLoader(rawAssetLoader, AUDIO_FORMAT, mediaDurationUs))
         .start();
     // Queue raw video data.
     while (!rawAssetLoader.queueInputTexture(firstTextureId, /* presentationTimeUs= */ 0)) {}
     while (!rawAssetLoader.queueInputTexture(
-        secondTextureId, /* presentationTimeUs= */ C.MICROS_PER_SECOND / 2)) {}
+        secondTextureId, /* presentationTimeUs= */ mediaDurationUs / 2)) {}
     rawAssetLoader.signalEndOfVideoInput();
 
     ExportResult exportResult = exportCompletionFuture.get();
@@ -251,32 +249,27 @@
     assertThat(exportResult.durationMs).isAtMost(1025);
   }
 
-  private void feedRawAudioDataToAssetLoader(RawAssetLoader rawAssetLoader, String audioAssetUri)
-      throws IOException {
-    // TODO: b/270695884 - Use media3 extractor to extract the samples.
-    MediaExtractor extractor = new MediaExtractor();
-    extractor.setDataSource(context.getResources().getAssets().openFd(audioAssetUri));
-
-    // The audio only file should have only one track.
-    MediaFormat audioFormat = extractor.getTrackFormat(0);
-    checkState(MimeTypes.isAudio(audioFormat.getString(MediaFormat.KEY_MIME)));
-    extractor.selectTrack(0);
-    int maxSampleSize = 34_000;
-    do {
-      long samplePresentationTimeUs = extractor.getSampleTime();
-      ByteBuffer sampleBuffer = ByteBuffer.allocateDirect(maxSampleSize);
-      if (extractor.readSampleData(sampleBuffer, /* offset= */ 0) == -1) {
-        break;
-      }
-      while (true) {
-        if (rawAssetLoader.queueAudioData(
-            sampleBuffer, samplePresentationTimeUs, /* isLast= */ false)) {
-          break;
-        }
-      }
-    } while (extractor.advance());
-    extractor.release();
-    checkState(rawAssetLoader.queueAudioData(ByteBuffer.allocate(0), 0, /* isLast= */ true));
+  private void feedRawAudioDataToAssetLoader(
+      RawAssetLoader rawAssetLoader, Format rawAudioFormat, long durationUs) {
+    AudioFormat audioFormat = new AudioFormat(rawAudioFormat);
+    SilentAudioGenerator silentAudioGenerator = new SilentAudioGenerator(audioFormat);
+    silentAudioGenerator.addSilence(durationUs);
+    int bytesWritten = 0;
+    while (silentAudioGenerator.hasRemaining()) {
+      ByteBuffer byteBuffer = silentAudioGenerator.getBuffer();
+      int byteBufferSize = byteBuffer.remaining();
+      while (!rawAssetLoader.queueAudioData(
+          byteBuffer,
+          /* presentationTimeUs= */ Util.sampleCountToDurationUs(
+              bytesWritten / audioFormat.bytesPerFrame, audioFormat.sampleRate),
+          /* isLast= */ false)) {}
+      bytesWritten += byteBufferSize;
+    }
+    while (!rawAssetLoader.queueAudioData(
+        ByteBuffer.allocate(0),
+        /* presentationTimeUs= */ Util.sampleCountToDurationUs(
+            bytesWritten / audioFormat.bytesPerFrame, audioFormat.sampleRate),
+        /* isLast= */ true)) {}
   }
 
   private static final class TestRawAssetLoaderFactory implements AssetLoader.Factory {
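The rewritten `feedRawAudioDataToAssetLoader` above drives the `RawAssetLoader` with generated silence instead of extracting a WAV asset. The sketch below shows the same pattern as a standalone helper, under stated assumptions: `SilentPcmFeeder`, `feedSilence`, and `stereo48kHzPcmFormat` are hypothetical names that are not part of this change, the silence comes from zero-filled buffers rather than the `SilentAudioGenerator` used by the test, and only calls already visible in the diff (`AudioProcessor.AudioFormat`, `Util.sampleCountToDurationUs`, `RawAssetLoader.queueAudioData`) plus standard `java.nio` are assumed.

```java
// Illustrative only: SilentPcmFeeder and its methods are hypothetical, not media3 API.
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.audio.AudioProcessor.AudioFormat;
import androidx.media3.common.util.Util;
import androidx.media3.transformer.RawAssetLoader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

final class SilentPcmFeeder {

  /** Example raw PCM format; the sample rate and channel count here are arbitrary. */
  static Format stereo48kHzPcmFormat() {
    return new Format.Builder()
        .setSampleMimeType(MimeTypes.AUDIO_RAW)
        .setSampleRate(48_000)
        .setChannelCount(2)
        .setPcmEncoding(C.ENCODING_PCM_16BIT)
        .build();
  }

  /** Queues {@code durationUs} of silence described by {@code rawAudioFormat}. */
  static void feedSilence(RawAssetLoader rawAssetLoader, Format rawAudioFormat, long durationUs) {
    AudioFormat audioFormat = new AudioFormat(rawAudioFormat);
    // Number of PCM frames needed to cover the requested duration (one frame = all channels).
    long remainingFrames = durationUs * audioFormat.sampleRate / C.MICROS_PER_SECOND;
    long framesWritten = 0;
    int framesPerBuffer = audioFormat.sampleRate / 10; // Roughly 100 ms of audio per buffer.
    while (remainingFrames > 0) {
      int frameCount = (int) Math.min(remainingFrames, framesPerBuffer);
      // allocateDirect zero-fills the buffer, and all-zero 16-bit PCM is silence.
      ByteBuffer silence =
          ByteBuffer.allocateDirect(frameCount * audioFormat.bytesPerFrame)
              .order(ByteOrder.nativeOrder());
      long presentationTimeUs =
          Util.sampleCountToDurationUs(framesWritten, audioFormat.sampleRate);
      // queueAudioData returns false while the loader cannot yet accept input; retry until it
      // takes the buffer, as the test does.
      while (!rawAssetLoader.queueAudioData(silence, presentationTimeUs, /* isLast= */ false)) {}
      framesWritten += frameCount;
      remainingFrames -= frameCount;
    }
    // Mark the end of the audio track with an empty buffer carrying the final timestamp.
    while (!rawAssetLoader.queueAudioData(
        ByteBuffer.allocate(0),
        Util.sampleCountToDurationUs(framesWritten, audioFormat.sampleRate),
        /* isLast= */ true)) {}
  }
}
```

Chunking into roughly 100 ms buffers keeps each `queueAudioData` call small, and the busy-wait retry mirrors how the test handles the loader temporarily refusing input before the audio pipeline is ready.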