Publish CompositionPlayer for playing compositions

This class is not yet ready for production use, so it is still
marked `@RestrictTo(LIBRARY_GROUP)` for now. Apps can experiment with it
in a non-production context by suppressing the associated lint error.
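
For example, an experimenting app could suppress the `RestrictedApi` lint check at the use site. A minimal sketch (editorial illustration, not part of this change; the helper class and method names are hypothetical, and the player calls mirror the demo added in this commit):

```java
import android.annotation.SuppressLint;
import android.content.Context;
import androidx.annotation.OptIn;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.CompositionPlayer;

// Hypothetical helper, for illustration only.
final class ExperimentalPreviewHelper {

  // CompositionPlayer is @RestrictTo(LIBRARY_GROUP) for now, so non-production callers
  // must suppress the RestrictedApi lint error (and opt in to the unstable media3 API).
  @SuppressLint("RestrictedApi")
  @OptIn(markerClass = UnstableApi.class)
  static CompositionPlayer createAndPreview(Context context, Composition composition) {
    CompositionPlayer player = new CompositionPlayer.Builder(context).build();
    player.setComposition(composition);
    player.prepare();
    player.play();
    return player;
  }
}
```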

* Issue: androidx/media#1014
* Issue: androidx/media#1185
* Issue: androidx/media#816

PiperOrigin-RevId: 633921353
Authored by ibaker on 2024-05-15 05:52:35 -07:00; committed by Copybara-Service
commit 0e5a5e0294 (parent 67554395cb)
32 changed files with 5443 additions and 0 deletions


@@ -0,0 +1,6 @@
# Composition demo
This experimental demo app shows how to use the Composition and CompositionPlayer APIs.
See the [demos README](../README.md) for instructions on how to build and run
this demo.
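
As a rough, editorial sketch of the flow the demo implements (not part of this file; `context`, `playerView`, and the URI are placeholders, the API calls mirror `CompositionPreviewActivity` in this commit, and lint suppression for the restricted API is omitted for brevity):

```java
import android.content.Context;
import androidx.media3.common.MediaItem;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.CompositionPlayer;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.ui.PlayerView;

final class PreviewSketch { // Hypothetical class, for illustration only.

  static void preview(Context context, PlayerView playerView) {
    // Wrap a single clip (placeholder URI) in an EditedMediaItem with an explicit duration.
    EditedMediaItem item =
        new EditedMediaItem.Builder(MediaItem.fromUri("https://example.com/video.mp4"))
            .setDurationUs(10_000_000)
            .build();
    // A Composition wraps one or more EditedMediaItemSequences.
    Composition composition =
        new Composition.Builder(new EditedMediaItemSequence(item)).build();
    // CompositionPlayer previews the composition in a standard PlayerView.
    CompositionPlayer player = new CompositionPlayer.Builder(context).build();
    playerView.setPlayer(player);
    player.setComposition(composition);
    player.prepare();
    player.play();
  }
}
```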


@@ -0,0 +1,63 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
apply from: '../../constants.gradle'
apply plugin: 'com.android.application'
android {
namespace 'androidx.media3.demo.composition'
compileSdk project.ext.compileSdkVersion
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
defaultConfig {
versionName project.ext.releaseVersion
versionCode project.ext.releaseVersionCode
minSdkVersion 21
targetSdkVersion project.ext.appTargetSdkVersion
multiDexEnabled true
}
buildTypes {
release {
shrinkResources true
minifyEnabled true
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.txt'
signingConfig signingConfigs.debug
}
}
lintOptions {
// This demo app isn't indexed and doesn't have translations.
disable 'GoogleAppIndexingWarning','MissingTranslation'
}
}
dependencies {
implementation 'androidx.appcompat:appcompat:1.6.1'
implementation 'com.google.android.material:material:' + androidxMaterialVersion
implementation project(modulePrefix + 'lib-effect')
implementation project(modulePrefix + 'lib-exoplayer')
implementation project(modulePrefix + 'lib-exoplayer-dash')
implementation project(modulePrefix + 'lib-transformer')
implementation project(modulePrefix + 'lib-ui')
implementation 'androidx.annotation:annotation:' + androidxAnnotationVersion
implementation 'androidx.multidex:multidex:' + androidxMultidexVersion
compileOnly 'org.checkerframework:checker-qual:' + checkerframeworkVersion
}


@@ -0,0 +1 @@
# Proguard rules specific to the composition demo app.


@@ -0,0 +1,51 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2024 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="androidx.media3.demo.composition">
<uses-sdk />
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_MEDIA_VIDEO"/>
<application
android:allowBackup="false"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:supportsRtl="true"
android:requestLegacyExternalStorage="true"
tools:targetApi="29"
android:taskAffinity=""
android:theme="@style/Theme.AppCompat" >
<activity android:name=".CompositionPreviewActivity"
android:configChanges="keyboard|keyboardHidden|orientation|screenSize|screenLayout|smallestScreenSize|uiMode"
android:launchMode="singleTop"
android:label="@string/app_name"
android:exported="true"
android:theme="@style/Theme.MaterialComponents.DayNight.NoActionBar">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
</application>
</manifest>


@@ -0,0 +1,69 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.demo.composition;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.recyclerview.widget.RecyclerView;
import java.util.List;
/** A {@link RecyclerView.Adapter} that displays assets in a sequence in a {@link RecyclerView}. */
public final class AssetItemAdapter extends RecyclerView.Adapter<AssetItemAdapter.ViewHolder> {
private static final String TAG = "AssetItemAdapter";
private final List<String> data;
/**
* Creates a new instance.
*
* @param data A list of items to populate the {@link RecyclerView} with.
*/
public AssetItemAdapter(List<String> data) {
this.data = data;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View v = LayoutInflater.from(parent.getContext()).inflate(R.layout.preset_item, parent, false);
return new ViewHolder(v);
}
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
holder.getTextView().setText(data.get(position));
}
@Override
public int getItemCount() {
return data.size();
}
/** A {@link RecyclerView.ViewHolder} used by {@link AssetItemAdapter}. */
public static final class ViewHolder extends RecyclerView.ViewHolder {
private final TextView textView;
private ViewHolder(View view) {
super(view);
textView = view.findViewById(R.id.preset_name_text);
}
private TextView getTextView() {
return textView;
}
}
}


@@ -0,0 +1,354 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.demo.composition;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.transformer.ImageUtil.getCommonImageMimeTypeFromExtension;
import android.app.Activity;
import android.content.DialogInterface;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.AppCompatButton;
import androidx.appcompat.widget.AppCompatTextView;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.Player;
import androidx.media3.common.audio.SonicAudioProcessor;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.Util;
import androidx.media3.effect.RgbFilter;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.CompositionPlayer;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.Effects;
import androidx.media3.transformer.ExportException;
import androidx.media3.transformer.ExportResult;
import androidx.media3.transformer.JsonUtil;
import androidx.media3.transformer.Transformer;
import androidx.media3.ui.PlayerView;
import androidx.recyclerview.widget.DividerItemDecoration;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.google.common.base.Stopwatch;
import com.google.common.base.Ticker;
import com.google.common.collect.ImmutableList;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.json.JSONException;
import org.json.JSONObject;
/**
* An {@link Activity} that previews compositions, using {@link
* androidx.media3.transformer.CompositionPlayer}.
*/
public final class CompositionPreviewActivity extends AppCompatActivity {
private static final String TAG = "CompPreviewActivity";
private final ArrayList<String> sequenceAssetTitles = new ArrayList<>();
@Nullable private boolean[] selectedMediaItems = null;
private String[] presetFileDescriptions = new String[0];
@Nullable private AssetItemAdapter assetItemAdapter;
@Nullable private CompositionPlayer compositionPlayer;
@Nullable private Transformer transformer;
@Nullable private File outputFile;
private @MonotonicNonNull PlayerView playerView;
private @MonotonicNonNull AppCompatButton exportButton;
private @MonotonicNonNull AppCompatTextView exportInformationTextView;
private @MonotonicNonNull Stopwatch exportStopwatch;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.composition_preview_activity);
playerView = findViewById(R.id.composition_player_view);
findViewById(R.id.preview_button).setOnClickListener(this::previewComposition);
findViewById(R.id.edit_sequence_button).setOnClickListener(this::selectPresetFile);
RecyclerView presetList = findViewById(R.id.composition_preset_list);
presetList.addItemDecoration(new DividerItemDecoration(this, DividerItemDecoration.VERTICAL));
LinearLayoutManager layoutManager =
new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, /* reverseLayout= */ false);
presetList.setLayoutManager(layoutManager);
exportInformationTextView = findViewById(R.id.export_information_text);
exportButton = findViewById(R.id.composition_export_button);
exportButton.setOnClickListener(this::exportComposition);
presetFileDescriptions = getResources().getStringArray(R.array.preset_descriptions);
// Select two media items by default.
selectedMediaItems = new boolean[presetFileDescriptions.length];
selectedMediaItems[0] = true;
selectedMediaItems[2] = true;
for (int i = 0; i < checkNotNull(selectedMediaItems).length; i++) {
if (checkNotNull(selectedMediaItems)[i]) {
sequenceAssetTitles.add(presetFileDescriptions[i]);
}
}
assetItemAdapter = new AssetItemAdapter(sequenceAssetTitles);
presetList.setAdapter(assetItemAdapter);
exportStopwatch =
Stopwatch.createUnstarted(
new Ticker() {
@Override
public long read() {
return android.os.SystemClock.elapsedRealtimeNanos();
}
});
}
@Override
protected void onStart() {
super.onStart();
checkStateNotNull(playerView).onResume();
}
@Override
protected void onStop() {
super.onStop();
checkStateNotNull(playerView).onPause();
releasePlayer();
cancelExport();
checkStateNotNull(exportStopwatch).reset();
}
private Composition prepareComposition() {
// Reading from resources here does not create a performance bottleneck because this
// method is called as part of more expensive operations.
String[] presetFileUris = getResources().getStringArray(/* id= */ R.array.preset_uris);
checkState(
/* expression= */ checkStateNotNull(presetFileUris).length == presetFileDescriptions.length,
/* errorMessage= */ "Unexpected array length "
+ getResources().getResourceName(R.array.preset_uris));
int[] presetDurationsUs = getResources().getIntArray(/* id= */ R.array.preset_durations);
checkState(
/* expression= */ checkStateNotNull(presetDurationsUs).length
== presetFileDescriptions.length,
/* errorMessage= */ "Unexpected array length "
+ getResources().getResourceName(R.array.preset_durations));
List<EditedMediaItem> mediaItems = new ArrayList<>();
ImmutableList<Effect> effects =
ImmutableList.of(
MatrixTransformationFactory.createDizzyCropEffect(), RgbFilter.createGrayscaleFilter());
for (int i = 0; i < checkNotNull(selectedMediaItems).length; i++) {
if (checkNotNull(selectedMediaItems)[i]) {
Uri uri = Uri.parse(presetFileUris[i]);
MediaItem.Builder mediaItemBuilder = new MediaItem.Builder().setUri(uri);
if (MimeTypes.isImage(getCommonImageMimeTypeFromExtension(uri))) {
mediaItemBuilder.setImageDurationMs(Util.usToMs(presetDurationsUs[i]));
}
MediaItem mediaItem = mediaItemBuilder.build();
SonicAudioProcessor pitchChanger = new SonicAudioProcessor();
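// Alternate the pitch shift between successive items in the sequence (raised for even positions, lowered for odd).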
pitchChanger.setPitch(mediaItems.size() % 2 == 0 ? 2f : 0.2f);
EditedMediaItem.Builder itemBuilder =
new EditedMediaItem.Builder(mediaItem)
.setEffects(
new Effects(
/* audioProcessors= */ ImmutableList.of(pitchChanger),
/* videoEffects= */ effects))
.setDurationUs(presetDurationsUs[i]);
mediaItems.add(itemBuilder.build());
}
}
EditedMediaItemSequence videoSequence = new EditedMediaItemSequence(mediaItems);
SonicAudioProcessor sampleRateChanger = new SonicAudioProcessor();
sampleRateChanger.setOutputSampleRateHz(8_000);
return new Composition.Builder(/* sequences= */ ImmutableList.of(videoSequence))
.setEffects(
new Effects(
/* audioProcessors= */ ImmutableList.of(sampleRateChanger),
/* videoEffects= */ ImmutableList.of()))
.build();
}
private void previewComposition(View view) {
releasePlayer();
Composition composition = prepareComposition();
checkStateNotNull(playerView).setPlayer(null);
CompositionPlayer player = new CompositionPlayer.Builder(getApplicationContext()).build();
this.compositionPlayer = player;
checkStateNotNull(playerView).setPlayer(compositionPlayer);
checkStateNotNull(playerView).setControllerAutoShow(false);
player.addListener(
new Player.Listener() {
@Override
public void onPlayerError(PlaybackException error) {
Toast.makeText(getApplicationContext(), "Preview error: " + error, Toast.LENGTH_LONG)
.show();
Log.e(TAG, "Preview error", error);
}
});
player.setComposition(composition);
player.prepare();
player.play();
}
private void selectPresetFile(View view) {
new AlertDialog.Builder(/* context= */ this)
.setTitle(R.string.select_preset_file_title)
.setMultiChoiceItems(
presetFileDescriptions,
checkNotNull(selectedMediaItems),
this::selectPresetFileInDialog)
.setPositiveButton(android.R.string.ok, /* listener= */ null)
.setCancelable(false)
.create()
.show();
}
private void selectPresetFileInDialog(DialogInterface dialog, int which, boolean isChecked) {
if (selectedMediaItems == null) {
return;
}
selectedMediaItems[which] = isChecked;
// The items will be added to the sequence in the order they were selected.
if (isChecked) {
sequenceAssetTitles.add(presetFileDescriptions[which]);
checkNotNull(assetItemAdapter).notifyItemInserted(sequenceAssetTitles.size() - 1);
} else {
int index = sequenceAssetTitles.indexOf(presetFileDescriptions[which]);
sequenceAssetTitles.remove(presetFileDescriptions[which]);
checkNotNull(assetItemAdapter).notifyItemRemoved(index);
}
}
private void exportComposition(View view) {
// Cancel and clean up files from any ongoing export.
cancelExport();
Composition composition = prepareComposition();
try {
outputFile =
createExternalCacheFile(
"composition-preview-" + Clock.DEFAULT.elapsedRealtime() + ".mp4");
} catch (IOException e) {
Toast.makeText(
getApplicationContext(),
"Aborting export! Unable to create output file: " + e,
Toast.LENGTH_LONG)
.show();
Log.e(TAG, "Aborting export! Unable to create output file: " + e);
return;
}
String filePath = outputFile.getAbsolutePath();
transformer =
new Transformer.Builder(this)
.addListener(
new Transformer.Listener() {
@Override
public void onCompleted(Composition composition, ExportResult exportResult) {
checkStateNotNull(exportStopwatch).stop();
long elapsedTimeMs = exportStopwatch.elapsed(TimeUnit.MILLISECONDS);
String details =
getString(R.string.export_completed, elapsedTimeMs / 1000.f, filePath);
Log.i(TAG, details);
checkStateNotNull(exportInformationTextView).setText(details);
try {
JSONObject resultJson =
JsonUtil.exportResultAsJsonObject(exportResult)
.put("elapsedTimeMs", elapsedTimeMs)
.put("device", JsonUtil.getDeviceDetailsAsJsonObject());
for (String line : Util.split(resultJson.toString(2), "\n")) {
Log.i(TAG, line);
}
} catch (JSONException e) {
Log.w(TAG, "Unable to convert exportResult to JSON", e);
}
}
@Override
public void onError(
Composition composition,
ExportResult exportResult,
ExportException exportException) {
checkStateNotNull(exportStopwatch).stop();
Toast.makeText(
getApplicationContext(),
"Export error: " + exportException,
Toast.LENGTH_LONG)
.show();
Log.e(TAG, "Export error", exportException);
checkStateNotNull(exportInformationTextView).setText(R.string.export_error);
}
})
.build();
checkStateNotNull(exportInformationTextView).setText(R.string.export_started);
checkStateNotNull(exportStopwatch).reset();
exportStopwatch.start();
checkStateNotNull(transformer).start(composition, filePath);
Log.i(TAG, "Export started");
}
private void releasePlayer() {
if (compositionPlayer != null) {
compositionPlayer.release();
compositionPlayer = null;
}
}
/** Cancels any ongoing export operation and deletes the output file. */
private void cancelExport() {
if (transformer != null) {
transformer.cancel();
transformer = null;
}
if (outputFile != null) {
outputFile.delete();
outputFile = null;
}
checkStateNotNull(exportInformationTextView).setText("");
}
/**
* Creates a {@link File} with the given {@code fileName} in the application's external cache directory.
*
* <p>If a file of that name already exists, it is overwritten.
*/
// TODO: b/320636291 - Refactor duplicate createExternalCacheFile functions.
private File createExternalCacheFile(String fileName) throws IOException {
File file = new File(getExternalCacheDir(), fileName);
if (file.exists() && !file.delete()) {
throw new IOException("Could not delete file: " + file.getAbsolutePath());
}
if (!file.createNewFile()) {
throw new IOException("Could not create file: " + file.getAbsolutePath());
}
return file;
}
}


@@ -0,0 +1,93 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.demo.composition;
import android.graphics.Matrix;
import androidx.media3.common.C;
import androidx.media3.common.util.Util;
import androidx.media3.effect.GlMatrixTransformation;
import androidx.media3.effect.MatrixTransformation;
/**
* Factory for {@link GlMatrixTransformation GlMatrixTransformations} and {@link
* MatrixTransformation MatrixTransformations} that create video effects by applying transformation
* matrices to the individual video frames.
*/
/* package */ final class MatrixTransformationFactory {
/**
* Returns a {@link MatrixTransformation} that rescales the frames over the first {@link
* #ZOOM_DURATION_SECONDS} seconds, such that the rectangle filled with the input frame increases
* linearly in size from a single point to filling the full output frame.
*/
public static MatrixTransformation createZoomInTransition() {
return MatrixTransformationFactory::calculateZoomInTransitionMatrix;
}
/**
* Returns a {@link MatrixTransformation} that crops frames to a rectangle that moves on an
* ellipse.
*/
public static MatrixTransformation createDizzyCropEffect() {
return MatrixTransformationFactory::calculateDizzyCropMatrix;
}
/**
* Returns a {@link GlMatrixTransformation} that rotates a frame in 3D around the y-axis and
* applies perspective projection to 2D.
*/
public static GlMatrixTransformation createSpin3dEffect() {
return MatrixTransformationFactory::calculate3dSpinMatrix;
}
private static final float ZOOM_DURATION_SECONDS = 2f;
private static final float DIZZY_CROP_ROTATION_PERIOD_US = 5_000_000f;
private static Matrix calculateZoomInTransitionMatrix(long presentationTimeUs) {
Matrix transformationMatrix = new Matrix();
float scale = Math.min(1, presentationTimeUs / (C.MICROS_PER_SECOND * ZOOM_DURATION_SECONDS));
transformationMatrix.postScale(/* sx= */ scale, /* sy= */ scale);
return transformationMatrix;
}
private static android.graphics.Matrix calculateDizzyCropMatrix(long presentationTimeUs) {
double theta = presentationTimeUs * 2 * Math.PI / DIZZY_CROP_ROTATION_PERIOD_US;
float centerX = 0.5f * (float) Math.cos(theta);
float centerY = 0.5f * (float) Math.sin(theta);
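// Move the visible window's center on a circle of radius 0.5 (an ellipse in pixel space for non-square frames), then zoom in 2x so the half-size window fills the output frame.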
android.graphics.Matrix transformationMatrix = new android.graphics.Matrix();
transformationMatrix.postTranslate(/* dx= */ centerX, /* dy= */ centerY);
transformationMatrix.postScale(/* sx= */ 2f, /* sy= */ 2f);
return transformationMatrix;
}
private static float[] calculate3dSpinMatrix(long presentationTimeUs) {
float[] transformationMatrix = new float[16];
android.opengl.Matrix.frustumM(
transformationMatrix,
/* offset= */ 0,
/* left= */ -1f,
/* right= */ 1f,
/* bottom= */ -1f,
/* top= */ 1f,
/* near= */ 3f,
/* far= */ 5f);
android.opengl.Matrix.translateM(
transformationMatrix, /* mOffset= */ 0, /* x= */ 0f, /* y= */ 0f, /* z= */ -4f);
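// Rotation angle in degrees: usToMs(presentationTimeUs) / 10 spins the frame around the y-axis at 100 degrees per second.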
float theta = Util.usToMs(presentationTimeUs) / 10f;
android.opengl.Matrix.rotateM(
transformationMatrix, /* mOffset= */ 0, theta, /* x= */ 0f, /* y= */ 1f, /* z= */ 0f);
return transformationMatrix;
}
}


@@ -0,0 +1,22 @@
/*
* Copyright (C) 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
@OptIn(markerClass = UnstableApi.class)
package androidx.media3.demo.composition;
import androidx.annotation.OptIn;
import androidx.media3.common.util.NonNullApi;
import androidx.media3.common.util.UnstableApi;


@@ -0,0 +1,117 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2024 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<androidx.constraintlayout.widget.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:padding="16dp">
<com.google.android.material.card.MaterialCardView
android:id="@+id/composition_preview_card_view"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:cardCornerRadius="4dp"
app:cardElevation="2dp">
<LinearLayout
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="wrap_content" >
<androidx.appcompat.widget.AppCompatTextView
android:id="@+id/input_text_view"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="8dp"
android:padding="8dp"
android:textAppearance="@style/TextAppearance.AppCompat.Medium"
android:text="@string/preview_single_sequence" />
<FrameLayout
android:layout_width="match_parent"
android:layout_height="200dp" >
<androidx.media3.ui.PlayerView
android:id="@+id/composition_player_view"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
</FrameLayout>
</LinearLayout>
</com.google.android.material.card.MaterialCardView>
<androidx.appcompat.widget.AppCompatTextView
android:id="@+id/sequence_header_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/single_sequence_items"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@id/composition_preview_card_view"
app:layout_constraintBottom_toTopOf="@id/composition_preset_list"/>
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/edit_sequence_button"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textAppearance="@style/TextAppearance.AppCompat.Small"
android:text="@string/edit"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toBottomOf="@id/composition_preview_card_view"/>
<androidx.recyclerview.widget.RecyclerView
android:id="@+id/composition_preset_list"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_marginTop="16dp"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@id/edit_sequence_button"
app:layout_constraintBottom_toTopOf="@id/export_information_text"/>
<androidx.appcompat.widget.AppCompatTextView
android:id="@+id/export_information_text"
android:layout_width="match_parent"
android:layout_height="wrap_content"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintBottom_toTopOf="@id/composition_export_button"/>
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/composition_export_button"
android:text="@string/export"
android:layout_marginTop="16dp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintBottom_toTopOf="@id/preview_button"/>
<androidx.appcompat.widget.AppCompatButton
android:id="@+id/preview_button"
android:text="@string/preview"
android:layout_marginTop="16dp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintBottom_toBottomOf="parent"/>
</androidx.constraintlayout.widget.ConstraintLayout>


@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2024 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<androidx.appcompat.widget.LinearLayoutCompat
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:padding="8dp">
<androidx.appcompat.widget.AppCompatTextView
android:id="@+id/preset_name_text"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_weight="1"/>
</androidx.appcompat.widget.LinearLayoutCompat>

5 binary image files added (not shown in this view): 3.3 KiB, 2.1 KiB, 4.8 KiB, 7.3 KiB, 10 KiB.


@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2024 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.Media3internal" parent="Theme.MaterialComponents.DayNight.DarkActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_200</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/black</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_200</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>


@@ -0,0 +1,74 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2024 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<string-array name="preset_descriptions">
<item>720p H264 video and AAC audio</item>
<item>1080p H265 video and AAC audio</item>
<item>360p H264 video and AAC audio</item>
<item>360p VP8 video and Vorbis audio</item>
<item>4K H264 video and AAC audio (portrait, no B-frames)</item>
<item>8k H265 video and AAC audio</item>
<item>Short 1080p H265 video and AAC audio</item>
<item>Long 180p H264 video and AAC audio</item>
<item>H264 video and AAC audio (portrait, H &gt; W, 0°)</item>
<item>H264 video and AAC audio (portrait, H &lt; W, 90°)</item>
<item>SEF slow motion with 240 fps</item>
<item>480p DASH (non-square pixels)</item>
<item>HDR (HDR10) H265 limited range video (encoding may fail)</item>
<item>HDR (HLG) H265 limited range video (encoding may fail)</item>
<item>720p H264 video with no audio</item>
<item>London JPG image (plays for 5 secs at 30 fps)</item>
<item>Tokyo JPG image (portrait, plays for 5 secs at 30 fps)</item>
</string-array>
<string-array name="preset_uris">
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/android-screens-10s.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-0/android-block-1080-hevc.mp4</item>
<item>https://html5demos.com/assets/dizzy.mp4</item>
<item>https://html5demos.com/assets/dizzy.webm</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/portrait_4k60.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/8k24fps_4s.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/1920w_1080h_4s.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-0/BigBuckBunny_320x180.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/portrait_avc_aac.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/portrait_rotated_avc_aac.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/slow-motion/slowMotion_stopwatch_240fps_long.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/gen/screens/dash-vod-single-segment/manifest-baseline.mpd</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/samsung-s21-hdr-hdr10.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/Pixel7Pro_HLG_1080P.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/mp4/sample_video_track_only.mp4</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/jpg/london.jpg</item>
<item>https://storage.googleapis.com/exoplayer-test-media-1/jpg/tokyo.jpg</item>
</string-array>
<integer-array name="preset_durations">
<item>10024000</item>
<item>23823000</item>
<item>25000000</item>
<item>25000000</item>
<item>3745000</item>
<item>4421000</item>
<item>3923000</item>
<item>596459000</item>
<item>3687000</item>
<item>2235000</item>
<item>47987000</item>
<item>128270000</item>
<item>4236000</item>
<item>5167000</item>
<item>1001000</item>
<item>5000000</item>
<item>5000000</item>
</integer-array>
</resources>


@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2024 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<color name="purple_200">#FFBB86FC</color>
<color name="purple_500">#FF6200EE</color>
<color name="purple_700">#FF3700B3</color>
<color name="teal_200">#FF03DAC5</color>
<color name="teal_700">#FF018786</color>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>


@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2024 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<string name="app_name">Composition Demo</string>
<string name="edit">Edit</string>
<string name="preview" translatable="false">Preview</string>
<string name="preview_single_sequence" translatable="false">Single sequence preview</string>
<string name="single_sequence_items" translatable="false">Single sequence items:</string>
<string name="select_preset_file_title" translatable="false">Choose preset file</string>
<string name="export" translatable="false">Export</string>
<string name="export_completed" translatable="false">Export completed in %.3f seconds.\nOutput: %s</string>
<string name="export_error" translatable="false">Export error</string>
<string name="export_started" translatable="false">Export started</string>
</resources>


@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2024 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.Media3internal" parent="Theme.MaterialComponents.DayNight.DarkActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_500</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/white</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_700</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>


@@ -0,0 +1,529 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.PlaybackException.ERROR_CODE_DECODER_INIT_FAILED;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static org.junit.Assert.assertThrows;
import android.app.Instrumentation;
import android.content.Context;
import android.graphics.BitmapFactory;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.DebugViewProvider;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.SystemClock;
import androidx.media3.common.util.Util;
import androidx.media3.datasource.AssetDataSource;
import androidx.media3.datasource.DataSourceUtil;
import androidx.media3.datasource.DataSpec;
import androidx.media3.effect.PreviewingSingleInputVideoGraph;
import androidx.media3.exoplayer.RendererCapabilities;
import androidx.media3.exoplayer.image.BitmapFactoryImageDecoder;
import androidx.media3.exoplayer.image.ImageDecoder;
import androidx.media3.exoplayer.image.ImageDecoderException;
import androidx.media3.exoplayer.source.ExternalLoader;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.rules.ActivityScenarioRule;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Instrumentation tests for {@link CompositionPlayer}. */
@RunWith(AndroidJUnit4.class)
public class CompositionPlayerTest {
private static final long TEST_TIMEOUT_MS = 10_000;
private static final String MP4_ASSET = "asset:///media/mp4/sample.mp4";
private static final String IMAGE_ASSET = "asset:///media/jpeg/white-1x1.jpg";
@Rule
public ActivityScenarioRule<SurfaceTestActivity> rule =
new ActivityScenarioRule<>(SurfaceTestActivity.class);
private final Instrumentation instrumentation = InstrumentationRegistry.getInstrumentation();
private final Context applicationContext = instrumentation.getContext().getApplicationContext();
private CompositionPlayer compositionPlayer;
private SurfaceView surfaceView;
private SurfaceHolder surfaceHolder;
private TextureView textureView;
@Before
public void setupSurfaces() {
rule.getScenario()
.onActivity(
activity -> {
surfaceView = activity.getSurfaceView();
textureView = activity.getTextureView();
});
surfaceHolder = surfaceView.getHolder();
}
@After
public void closeActivity() {
rule.getScenario().close();
}
@After
public void releasePlayer() {
instrumentation.runOnMainSync(
() -> {
if (compositionPlayer != null) {
compositionPlayer.release();
}
});
}
@Test
public void setVideoSurfaceView_beforeSettingComposition_surfaceViewIsPassed() throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
compositionPlayer.setVideoSurfaceView(surfaceView);
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(
new EditedMediaItemSequence(
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET))
.setDurationUs(1_000_000)
.build()))
.build());
compositionPlayer.prepare();
});
listener.waitUntilFirstFrameRendered();
}
@Test
public void setVideoSurfaceView_afterSettingComposition_surfaceViewIsPassed() throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(
new EditedMediaItemSequence(
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET))
.setDurationUs(1_000_000)
.build()))
.build());
compositionPlayer.setVideoSurfaceView(surfaceView);
compositionPlayer.prepare();
});
listener.waitUntilFirstFrameRendered();
}
@Test
public void setVideoSurfaceHolder_beforeSettingComposition_surfaceHolderIsPassed()
throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
compositionPlayer.setVideoSurfaceHolder(surfaceHolder);
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(
new EditedMediaItemSequence(
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET))
.setDurationUs(1_000_000)
.build()))
.build());
compositionPlayer.prepare();
});
listener.waitUntilFirstFrameRendered();
}
@Test
public void setVideoSurfaceHolder_afterSettingComposition_surfaceHolderIsPassed()
throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(
new EditedMediaItemSequence(
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET))
.setDurationUs(1_000_000)
.build()))
.build());
compositionPlayer.setVideoSurfaceHolder(surfaceHolder);
compositionPlayer.prepare();
});
listener.waitUntilFirstFrameRendered();
}
@Test
public void setVideoTextureView_throws() {
AtomicReference<UnsupportedOperationException> exception = new AtomicReference<>();
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
try {
compositionPlayer.setVideoTextureView(textureView);
} catch (UnsupportedOperationException e) {
exception.set(e);
}
});
assertThat(exception.get()).isNotNull();
}
@Test
public void imagePreview_imagePlaysForSetDuration() throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
// Set a surface on the player even though there is no UI in this test. We need a surface
// because otherwise the player will skip/drop video frames.
compositionPlayer.setVideoSurfaceView(surfaceView);
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(
new EditedMediaItemSequence(
new EditedMediaItem.Builder(
new MediaItem.Builder()
.setUri(IMAGE_ASSET)
.setImageDurationMs(1_000)
.build())
.setDurationUs(1_000_000)
.build()))
.build());
compositionPlayer.prepare();
});
listener.waitUntilFirstFrameRendered();
listener.waitUntilPlayerReady();
long playbackStartTimeMs = SystemClock.DEFAULT.elapsedRealtime();
instrumentation.runOnMainSync(() -> compositionPlayer.play());
listener.waitUntilPlayerEnded();
long playbackRealTimeMs = SystemClock.DEFAULT.elapsedRealtime() - playbackStartTimeMs;
assertThat(playbackRealTimeMs).isAtLeast(1_000);
}
@Test
public void imagePreview_externallyLoadedImage() throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
ExternalLoader externalImageLoader =
loadRequest -> immediateFuture(Util.getUtf8Bytes(loadRequest.uri.toString()));
instrumentation.runOnMainSync(
() -> {
compositionPlayer =
new CompositionPlayer.Builder(applicationContext)
.setExternalImageLoader(externalImageLoader)
.setImageDecoderFactory(new TestImageDecoderFactory())
.build();
// Set a surface on the player even though there is no UI in this test. We need a surface
// because otherwise the player will skip/drop video frames.
compositionPlayer.setVideoSurfaceView(surfaceView);
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(
new EditedMediaItemSequence(
new EditedMediaItem.Builder(
new MediaItem.Builder()
.setUri(IMAGE_ASSET)
.setMimeType(MimeTypes.APPLICATION_EXTERNALLY_LOADED_IMAGE)
.setImageDurationMs(1_000)
.build())
.setDurationUs(1_000_000)
.build()))
.build());
compositionPlayer.prepare();
});
listener.waitUntilFirstFrameRendered();
}
@Test
public void imagePreview_twoImages() throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
EditedMediaItem image =
new EditedMediaItem.Builder(
new MediaItem.Builder().setUri(IMAGE_ASSET).setImageDurationMs(500).build())
.setDurationUs(500_000)
.build();
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
// Set a surface on the player even though there is no UI in this test. We need a surface
// because otherwise the player will skip/drop video frames.
compositionPlayer.setVideoSurfaceView(surfaceView);
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(new EditedMediaItemSequence(image, image)).build());
compositionPlayer.prepare();
compositionPlayer.play();
});
listener.waitUntilPlayerEnded();
}
@Test
public void composition_imageThenVideo() throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
EditedMediaItem image =
new EditedMediaItem.Builder(
new MediaItem.Builder().setUri(IMAGE_ASSET).setImageDurationMs(500).build())
.setDurationUs(500_000)
.build();
EditedMediaItem video =
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET)).setDurationUs(1_000_000).build();
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
// Set a surface on the player even though there is no UI in this test. We need a surface
// because otherwise the player will skip/drop video frames.
compositionPlayer.setVideoSurfaceView(surfaceView);
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(new EditedMediaItemSequence(image, video)).build());
compositionPlayer.prepare();
compositionPlayer.play();
});
listener.waitUntilPlayerEnded();
}
@Test
public void composition_videoThenImage() throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
EditedMediaItem video =
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET)).setDurationUs(1_000_000).build();
EditedMediaItem image =
new EditedMediaItem.Builder(
new MediaItem.Builder().setUri(IMAGE_ASSET).setImageDurationMs(500).build())
.setDurationUs(500_000)
.build();
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
// Set a surface on the player even though there is no UI in this test. We need a surface
// because otherwise the player will skip/drop video frames.
compositionPlayer.setVideoSurfaceView(surfaceView);
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(new EditedMediaItemSequence(video, image)).build());
compositionPlayer.prepare();
compositionPlayer.play();
});
listener.waitUntilPlayerEnded();
}
@Test
public void playback_videoSinkProviderFails_playerRaisesError() {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
EditedMediaItem video =
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET)).setDurationUs(1_000_000).build();
instrumentation.runOnMainSync(
() -> {
compositionPlayer =
new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory(
(context,
outputColorInfo,
debugViewProvider,
graphListener,
listenerExecutor,
compositionEffects,
initialTimestampOffsetUs) -> {
throw new VideoFrameProcessingException(
"Test video graph failed to initialize");
})
.build();
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(new EditedMediaItemSequence(video)).build());
compositionPlayer.prepare();
compositionPlayer.play();
});
PlaybackException thrownException =
assertThrows(PlaybackException.class, listener::waitUntilPlayerEnded);
assertThat(thrownException.errorCode).isEqualTo(ERROR_CODE_DECODER_INIT_FAILED);
}
@Test
public void release_videoSinkProviderFailsDuringRelease_playerDoesNotRaiseError()
throws Exception {
PlayerTestListener playerTestListener = new PlayerTestListener(TEST_TIMEOUT_MS);
EditedMediaItem video =
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET)).setDurationUs(1_000_000).build();
instrumentation.runOnMainSync(
() -> {
compositionPlayer =
new CompositionPlayer.Builder(applicationContext)
.setPreviewingVideoGraphFactory(FailingReleaseVideoGraph::new)
.build();
compositionPlayer.addListener(playerTestListener);
compositionPlayer.setComposition(
new Composition.Builder(new EditedMediaItemSequence(video)).build());
compositionPlayer.prepare();
compositionPlayer.play();
});
// Wait until playback has ended to make sure the VideoGraph has been created.
playerTestListener.waitUntilPlayerEnded();
instrumentation.runOnMainSync(compositionPlayer::release);
playerTestListener.waitUntilPlayerIdle();
}
private static final class TestImageDecoderFactory implements ImageDecoder.Factory {
@Override
public @RendererCapabilities.Capabilities int supportsFormat(Format format) {
return format.sampleMimeType != null
&& format.sampleMimeType.equals(MimeTypes.APPLICATION_EXTERNALLY_LOADED_IMAGE)
? RendererCapabilities.create(C.FORMAT_HANDLED)
: RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE);
}
@Override
public ImageDecoder createImageDecoder() {
return new BitmapFactoryImageDecoder.Factory(
/* bitmapDecoder= */ (data, length) -> {
try {
// The test serializes the image URI string to a byte array.
String assetPath = new String(data);
AssetDataSource assetDataSource =
new AssetDataSource(ApplicationProvider.getApplicationContext());
assetDataSource.open(new DataSpec.Builder().setUri(assetPath).build());
byte[] imageData = DataSourceUtil.readToEnd(assetDataSource);
return BitmapFactory.decodeByteArray(imageData, 0, imageData.length);
} catch (IOException e) {
throw new ImageDecoderException(e);
}
})
.createImageDecoder();
}
}
private static final class FailingReleaseVideoGraph extends ForwardingVideoGraph {
public FailingReleaseVideoGraph(
Context context,
ColorInfo outputColorInfo,
DebugViewProvider debugViewProvider,
VideoGraph.Listener listener,
Executor listenerExecutor,
List<Effect> compositionEffects,
long initialTimestampOffsetUs) {
super(
new PreviewingSingleInputVideoGraph.Factory()
.create(
context,
outputColorInfo,
debugViewProvider,
listener,
listenerExecutor,
compositionEffects,
initialTimestampOffsetUs));
}
@Override
public void release() {
throw new RuntimeException("VideoGraph release error");
}
}
private static class ForwardingVideoGraph implements PreviewingVideoGraph {
private final PreviewingVideoGraph videoGraph;
public ForwardingVideoGraph(PreviewingVideoGraph videoGraph) {
this.videoGraph = videoGraph;
}
@Override
public void initialize() throws VideoFrameProcessingException {
videoGraph.initialize();
}
@Override
public void registerInput(int inputIndex) throws VideoFrameProcessingException {
videoGraph.registerInput(inputIndex);
}
@Override
public VideoFrameProcessor getProcessor(int inputId) {
return videoGraph.getProcessor(inputId);
}
@Override
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
videoGraph.setOutputSurfaceInfo(outputSurfaceInfo);
}
@Override
public boolean hasProducedFrameWithTimestampZero() {
return videoGraph.hasProducedFrameWithTimestampZero();
}
@Override
public void release() {
videoGraph.release();
}
@Override
public void renderOutputFrame(long renderTimeNs) {
videoGraph.renderOutputFrame(renderTimeNs);
}
}
}


@@ -0,0 +1,154 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer.mh.performance;
import static androidx.media3.common.MimeTypes.VIDEO_H264;
import static androidx.media3.test.utils.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE;
import static androidx.media3.test.utils.BitmapPixelTestUtil.createArgb8888BitmapFromRgba8888Image;
import static androidx.media3.test.utils.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static androidx.media3.transformer.mh.performance.PlaybackTestUtil.createTimestampOverlay;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import android.app.Instrumentation;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.media.Image;
import android.media.ImageReader;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.Util;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.CompositionPlayer;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.Effects;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import com.google.common.collect.ImmutableList;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
/** Playback tests for {@link CompositionPlayer}. */
@RunWith(AndroidJUnit4.class)
public class CompositionPlaybackTest {
private static final String TEST_DIRECTORY = "test-generated-goldens/ExoPlayerPlaybackTest";
private static final String MP4_ASSET_URI_STRING = "asset:///media/mp4/sample.mp4";
private static final Format MP4_ASSET_FORMAT =
new Format.Builder()
.setSampleMimeType(VIDEO_H264)
.setWidth(1080)
.setHeight(720)
.setFrameRate(29.97f)
.setCodecs("avc1.64001F")
.build();
private static final Size MP4_ASSET_VIDEO_SIZE =
new Size(MP4_ASSET_FORMAT.width, MP4_ASSET_FORMAT.height);
private static final long TEST_TIMEOUT_MS = 10_000;
@Rule public final TestName testName = new TestName();
private final Instrumentation instrumentation = InstrumentationRegistry.getInstrumentation();
private @MonotonicNonNull CompositionPlayer player;
private @MonotonicNonNull ImageReader outputImageReader;
private String testId;
@Before
public void setUpTestId() {
testId = testName.getMethodName();
}
@After
public void tearDown() {
instrumentation.runOnMainSync(
() -> {
if (player != null) {
player.release();
}
if (outputImageReader != null) {
outputImageReader.close();
}
});
}
@Test
public void compositionPlayerPreviewTest_ensuresFirstFrameRenderedCorrectly() throws Exception {
AtomicReference<Bitmap> renderedFirstFrameBitmap = new AtomicReference<>();
ConditionVariable hasRenderedFirstFrameCondition = new ConditionVariable();
outputImageReader =
ImageReader.newInstance(
MP4_ASSET_VIDEO_SIZE.getWidth(),
MP4_ASSET_VIDEO_SIZE.getHeight(),
PixelFormat.RGBA_8888,
/* maxImages= */ 1);
instrumentation.runOnMainSync(
() -> {
player = new CompositionPlayer.Builder(instrumentation.getContext()).build();
outputImageReader.setOnImageAvailableListener(
imageReader -> {
try (Image image = imageReader.acquireLatestImage()) {
renderedFirstFrameBitmap.set(createArgb8888BitmapFromRgba8888Image(image));
}
hasRenderedFirstFrameCondition.open();
},
Util.createHandlerForCurrentOrMainLooper());
player.setVideoSurface(outputImageReader.getSurface(), MP4_ASSET_VIDEO_SIZE);
player.setComposition(
new Composition.Builder(
new EditedMediaItemSequence(
new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET_URI_STRING))
.setEffects(
new Effects(
/* audioProcessors= */ ImmutableList.of(),
/* videoEffects= */ ImmutableList.of(
createTimestampOverlay())))
.setDurationUs(1_024_000L)
.build()))
.build());
player.prepare();
});
if (!hasRenderedFirstFrameCondition.block(TEST_TIMEOUT_MS)) {
throw new TimeoutException(
Util.formatInvariant("First frame not rendered in %d ms.", TEST_TIMEOUT_MS));
}
assertWithMessage("First frame is not rendered.")
.that(renderedFirstFrameBitmap.get())
.isNotNull();
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(
/* expected= */ readBitmap(TEST_DIRECTORY + "/first_frame.png"),
/* actual= */ renderedFirstFrameBitmap.get(),
testId);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
// TODO: b/315800590 - Verify onFirstFrameRendered is invoked only once.
}
}

View File

@ -0,0 +1,380 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer.mh.performance;
import static com.google.common.truth.Truth.assertThat;
import android.app.Instrumentation;
import android.content.Context;
import android.view.SurfaceView;
import androidx.media3.common.Effect;
import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.MediaItem;
import androidx.media3.effect.GlEffect;
import androidx.media3.effect.PassthroughShaderProgram;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.CompositionPlayer;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.Effects;
import androidx.media3.transformer.PlayerTestListener;
import androidx.media3.transformer.SurfaceTestActivity;
import androidx.test.ext.junit.rules.ActivityScenarioRule;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Instrumentation tests for {@link CompositionPlayer} {@linkplain CompositionPlayer#seekTo
* seeking}.
*/
@RunWith(AndroidJUnit4.class)
public class CompositionPlayerSeekTest {
private static final long TEST_TIMEOUT_MS = 10_000;
private static final String MP4_ASSET = "asset:///media/mp4/sample.mp4";
@Rule
public ActivityScenarioRule<SurfaceTestActivity> rule =
new ActivityScenarioRule<>(SurfaceTestActivity.class);
private final Instrumentation instrumentation = InstrumentationRegistry.getInstrumentation();
private final Context applicationContext = instrumentation.getContext().getApplicationContext();
private CompositionPlayer compositionPlayer;
private SurfaceView surfaceView;
@Before
public void setupSurfaces() {
rule.getScenario().onActivity(activity -> surfaceView = activity.getSurfaceView());
}
@After
public void closeActivity() {
rule.getScenario().close();
}
@After
public void releasePlayer() {
instrumentation.runOnMainSync(
() -> {
if (compositionPlayer != null) {
compositionPlayer.release();
}
});
}
// TODO: b/320244483 - Add tests that seek into the middle of the sequence.
@Test
public void seekToZero_singleSequenceOfTwoVideos() throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS * 1000);
InputTimestampRecordingShaderProgram inputTimestampRecordingShaderProgram =
new InputTimestampRecordingShaderProgram();
EditedMediaItem video =
createEditedMediaItem(
/* videoEffects= */ ImmutableList.of(
(GlEffect) (context, useHdr) -> inputTimestampRecordingShaderProgram));
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
          // Set a surface on the player even though there is no UI in this test. We need a
          // surface, otherwise the player will skip/drop video frames.
compositionPlayer.setVideoSurfaceView(surfaceView);
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(new EditedMediaItemSequence(video, video)).build());
compositionPlayer.prepare();
compositionPlayer.play();
});
listener.waitUntilPlayerEnded();
listener.resetStatus();
instrumentation.runOnMainSync(() -> compositionPlayer.seekTo(0));
listener.waitUntilPlayerEnded();
ImmutableList<Long> timestampsUsOfOneSequence =
ImmutableList.of(
1000000000000L,
1000000033366L,
1000000066733L,
1000000100100L,
1000000133466L,
1000000166833L,
1000000200200L,
1000000233566L,
1000000266933L,
1000000300300L,
1000000333666L,
1000000367033L,
1000000400400L,
1000000433766L,
1000000467133L,
1000000500500L,
1000000533866L,
1000000567233L,
1000000600600L,
1000000633966L,
1000000667333L,
1000000700700L,
1000000734066L,
1000000767433L,
1000000800800L,
1000000834166L,
1000000867533L,
1000000900900L,
1000000934266L,
1000000967633L,
// Second video starts here.
1000001024000L,
1000001057366L,
1000001090733L,
1000001124100L,
1000001157466L,
1000001190833L,
1000001224200L,
1000001257566L,
1000001290933L,
1000001324300L,
1000001357666L,
1000001391033L,
1000001424400L,
1000001457766L,
1000001491133L,
1000001524500L,
1000001557866L,
1000001591233L,
1000001624600L,
1000001657966L,
1000001691333L,
1000001724700L,
1000001758066L,
1000001791433L,
1000001824800L,
1000001858166L,
1000001891533L,
1000001924900L,
1000001958266L,
1000001991633L);
assertThat(inputTimestampRecordingShaderProgram.timestampsUs)
        // The seek happens after the first playback ends, so the timestamps are repeated twice.
.containsExactlyElementsIn(
new ImmutableList.Builder<Long>()
.addAll(timestampsUsOfOneSequence)
.addAll(timestampsUsOfOneSequence)
.build())
.inOrder();
}
@Test
public void seekToZero_after15framesInSingleSequenceOfTwoVideos() throws Exception {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS * 1000);
ResettableCountDownLatch framesReceivedLatch = new ResettableCountDownLatch(15);
AtomicBoolean shaderProgramShouldBlockInput = new AtomicBoolean();
InputTimestampRecordingShaderProgram inputTimestampRecordingShaderProgram =
new InputTimestampRecordingShaderProgram() {
@Override
public void queueInputFrame(
GlObjectsProvider glObjectsProvider,
GlTextureInfo inputTexture,
long presentationTimeUs) {
super.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs);
framesReceivedLatch.countDown();
if (framesReceivedLatch.getCount() == 0) {
shaderProgramShouldBlockInput.set(true);
}
}
@Override
public void releaseOutputFrame(GlTextureInfo outputTexture) {
            // The input listener capacity is reported in the super method. Block further input by
            // skipping the input capacity report.
if (shaderProgramShouldBlockInput.get()) {
return;
}
super.releaseOutputFrame(outputTexture);
}
@Override
public void flush() {
super.flush();
shaderProgramShouldBlockInput.set(false);
framesReceivedLatch.reset(Integer.MAX_VALUE);
}
};
EditedMediaItem video =
createEditedMediaItem(
/* videoEffects= */ ImmutableList.of(
(GlEffect) (context, useHdr) -> inputTimestampRecordingShaderProgram));
instrumentation.runOnMainSync(
() -> {
compositionPlayer = new CompositionPlayer.Builder(applicationContext).build();
          // Set a surface on the player even though there is no UI in this test. We need a
          // surface, otherwise the player will skip/drop video frames.
compositionPlayer.setVideoSurfaceView(surfaceView);
compositionPlayer.addListener(listener);
compositionPlayer.setComposition(
new Composition.Builder(new EditedMediaItemSequence(video, video)).build());
compositionPlayer.prepare();
compositionPlayer.play();
});
    // Wait until the expected number of frames has been received; further input is then blocked by
    // the shader program.
framesReceivedLatch.await();
instrumentation.runOnMainSync(() -> compositionPlayer.seekTo(0));
listener.waitUntilPlayerEnded();
ImmutableList<Long> expectedTimestampsUs =
ImmutableList.of(
1000000000000L,
1000000033366L,
1000000066733L,
1000000100100L,
1000000133466L,
1000000166833L,
1000000200200L,
1000000233566L,
1000000266933L,
1000000300300L,
1000000333666L,
1000000367033L,
1000000400400L,
1000000433766L,
1000000467133L,
            // The first 15 frames were received, then the seek to zero happens here.
1000000000000L,
1000000033366L,
1000000066733L,
1000000100100L,
1000000133466L,
1000000166833L,
1000000200200L,
1000000233566L,
1000000266933L,
1000000300300L,
1000000333666L,
1000000367033L,
1000000400400L,
1000000433766L,
1000000467133L,
1000000500500L,
1000000533866L,
1000000567233L,
1000000600600L,
1000000633966L,
1000000667333L,
1000000700700L,
1000000734066L,
1000000767433L,
1000000800800L,
1000000834166L,
1000000867533L,
1000000900900L,
1000000934266L,
1000000967633L,
// Second video starts here.
1000001024000L,
1000001057366L,
1000001090733L,
1000001124100L,
1000001157466L,
1000001190833L,
1000001224200L,
1000001257566L,
1000001290933L,
1000001324300L,
1000001357666L,
1000001391033L,
1000001424400L,
1000001457766L,
1000001491133L,
1000001524500L,
1000001557866L,
1000001591233L,
1000001624600L,
1000001657966L,
1000001691333L,
1000001724700L,
1000001758066L,
1000001791433L,
1000001824800L,
1000001858166L,
1000001891533L,
1000001924900L,
1000001958266L,
1000001991633L);
assertThat(inputTimestampRecordingShaderProgram.timestampsUs)
.containsExactlyElementsIn(expectedTimestampsUs)
.inOrder();
}
private static EditedMediaItem createEditedMediaItem(List<Effect> videoEffects) {
return new EditedMediaItem.Builder(MediaItem.fromUri(MP4_ASSET))
.setDurationUs(1_024_000)
.setEffects(new Effects(/* audioProcessors= */ ImmutableList.of(), videoEffects))
.build();
}
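  /**
   * A {@link PassthroughShaderProgram} that records the presentation timestamps of the frames
   * queued to it.
   */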
private static class InputTimestampRecordingShaderProgram extends PassthroughShaderProgram {
public final ArrayList<Long> timestampsUs = new ArrayList<>();
@Override
public void queueInputFrame(
GlObjectsProvider glObjectsProvider, GlTextureInfo inputTexture, long presentationTimeUs) {
super.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs);
timestampsUs.add(presentationTimeUs);
}
}
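  /** A {@link CountDownLatch} wrapper whose count can be reset. */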
private static final class ResettableCountDownLatch {
private CountDownLatch latch;
public ResettableCountDownLatch(int count) {
latch = new CountDownLatch(count);
}
public void await() throws InterruptedException {
latch.await();
}
public void countDown() {
latch.countDown();
}
public long getCount() {
return latch.getCount();
}
public void reset(int count) {
latch = new CountDownLatch(count);
}
}
}

View File

@ -0,0 +1,141 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer.mh.performance;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.transformer.AndroidTestUtil.MP4_ASSET_URI_STRING;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import android.app.Instrumentation;
import android.os.SystemClock;
import android.view.SurfaceView;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.util.Util;
import androidx.media3.effect.Contrast;
import androidx.media3.transformer.Composition;
import androidx.media3.transformer.CompositionPlayer;
import androidx.media3.transformer.EditedMediaItem;
import androidx.media3.transformer.EditedMediaItemSequence;
import androidx.media3.transformer.Effects;
import androidx.media3.transformer.PlayerTestListener;
import androidx.media3.transformer.SurfaceTestActivity;
import androidx.test.ext.junit.rules.ActivityScenarioRule;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Range;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Performance tests for the composition previewing pipeline in {@link CompositionPlayer}. */
@RunWith(AndroidJUnit4.class)
public class VideoCompositionPreviewPerformanceTest {
private static final long TEST_TIMEOUT_MS = 10_000;
private static final long MEDIA_ITEM_CLIP_DURATION_MS = 500;
@Rule
public ActivityScenarioRule<SurfaceTestActivity> rule =
new ActivityScenarioRule<>(SurfaceTestActivity.class);
private final Instrumentation instrumentation = InstrumentationRegistry.getInstrumentation();
private SurfaceView surfaceView;
private @MonotonicNonNull CompositionPlayer player;
@Before
public void setUpSurface() {
rule.getScenario().onActivity(activity -> surfaceView = activity.getSurfaceView());
}
@After
public void tearDown() {
instrumentation.runOnMainSync(
() -> {
if (player != null) {
player.release();
}
});
rule.getScenario().close();
}
/**
   * This test guards against performance regressions in the effects preview pipeline by checking
   * that format switches do not cause the player to stall.
*/
@Test
public void compositionPlayerCompositionPreviewTest() throws PlaybackException, TimeoutException {
PlayerTestListener listener = new PlayerTestListener(TEST_TIMEOUT_MS);
instrumentation.runOnMainSync(
() -> {
player = new CompositionPlayer.Builder(getApplicationContext()).build();
player.setVideoSurfaceView(surfaceView);
player.setPlayWhenReady(false);
player.addListener(listener);
player.setComposition(
new Composition.Builder(
new EditedMediaItemSequence(
getClippedEditedMediaItem(MP4_ASSET_URI_STRING, new Contrast(.2f)),
getClippedEditedMediaItem(MP4_ASSET_URI_STRING, new Contrast(-.2f))))
.build());
player.prepare();
});
listener.waitUntilPlayerReady();
AtomicLong playbackStartTimeMs = new AtomicLong();
instrumentation.runOnMainSync(
() -> {
playbackStartTimeMs.set(SystemClock.elapsedRealtime());
checkNotNull(player).play();
});
listener.waitUntilPlayerEnded();
long compositionDurationMs = MEDIA_ITEM_CLIP_DURATION_MS * 2;
long playbackDurationMs = SystemClock.elapsedRealtime() - playbackStartTimeMs.get();
assertThat(playbackDurationMs)
.isIn(Range.closed(compositionDurationMs, compositionDurationMs + 250));
}
private static EditedMediaItem getClippedEditedMediaItem(String uri, Effect effect) {
MediaItem mediaItem =
new MediaItem.Builder()
.setUri(uri)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setEndPositionMs(MEDIA_ITEM_CLIP_DURATION_MS)
.build())
.build();
return new EditedMediaItem.Builder(mediaItem)
.setEffects(
new Effects(
/* audioProcessors= */ ImmutableList.of(),
/* videoEffects= */ ImmutableList.of(effect)))
.setDurationUs(Util.msToUs(MEDIA_ITEM_CLIP_DURATION_MS))
.build();
}
}

View File

@ -0,0 +1,298 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.audio.AudioProcessor.EMPTY_BUFFER;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import androidx.annotation.Nullable;
import androidx.media3.common.AudioAttributes;
import androidx.media3.common.AuxEffectInfo;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackParameters;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.exoplayer.audio.AudioSink;
import java.nio.ByteBuffer;
import java.util.Objects;
/**
* An {@link AudioSink} implementation that feeds an {@link AudioGraphInput}.
*
* <p>Should be used by {@link PreviewAudioPipeline}.
*/
/* package */ final class AudioGraphInputAudioSink implements AudioSink {
/**
* Controller for {@link AudioGraphInputAudioSink}.
*
* <p>All methods will be called on the playback thread of the ExoPlayer instance writing to this
* sink.
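   *
   * <p>A minimal implementation sketch (the class name and fields below are illustrative only and
   * not part of this interface):
   *
   * <pre>{@code
   * class MyController implements AudioGraphInputAudioSink.Controller {
   *   private final AudioGraphInput audioGraphInput; // Created elsewhere.
   *   private volatile long positionUs; // Updated by the playback logic (not shown).
   *   private volatile boolean ended; // Set once all audio has been played out (not shown).
   *
   *   MyController(AudioGraphInput audioGraphInput) {
   *     this.audioGraphInput = audioGraphInput;
   *   }
   *
   *   public AudioGraphInput getAudioGraphInput(EditedMediaItem editedMediaItem, Format format) {
   *     return audioGraphInput;
   *   }
   *
   *   public long getCurrentPositionUs() {
   *     return positionUs;
   *   }
   *
   *   public boolean isEnded() {
   *     return ended;
   *   }
   * }
   * }</pre>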
*/
public interface Controller {
/**
* Returns the {@link AudioGraphInput} instance associated with this {@linkplain
* AudioGraphInputAudioSink sink}.
*
* <p>Data {@linkplain #handleBuffer written} to the sink will be {@linkplain
* AudioGraphInput#queueInputBuffer() queued} to the {@link AudioGraphInput}.
*
* @param editedMediaItem The first {@link EditedMediaItem} queued to the {@link
* AudioGraphInput}.
* @param format The {@link Format} used to {@linkplain AudioGraphInputAudioSink#configure
* configure} the {@linkplain AudioGraphInputAudioSink sink}.
* @return The {@link AudioGraphInput}.
* @throws ExportException If there is a problem initializing the {@linkplain AudioGraphInput
* input}.
*/
AudioGraphInput getAudioGraphInput(EditedMediaItem editedMediaItem, Format format)
throws ExportException;
/**
* Returns the position (in microseconds) that should be {@linkplain
* AudioSink#getCurrentPositionUs returned} by this sink.
*/
long getCurrentPositionUs();
/** Returns whether the controller is ended. */
boolean isEnded();
/** See {@link #play()}. */
default void onPlay() {}
/** See {@link #pause()}. */
default void onPause() {}
/** See {@link #reset()}. */
default void onReset() {}
}
private final Controller controller;
@Nullable private AudioGraphInput outputGraphInput;
@Nullable private Format currentInputFormat;
private boolean inputStreamEnded;
private boolean signalledEndOfStream;
@Nullable private EditedMediaItemInfo currentEditedMediaItemInfo;
private long offsetToCompositionTimeUs;
public AudioGraphInputAudioSink(Controller controller) {
this.controller = controller;
}
/**
* Informs the audio sink there is a change on the {@link EditedMediaItem} currently rendered by
* the renderer.
*
* @param editedMediaItem The {@link EditedMediaItem}.
* @param offsetToCompositionTimeUs The offset to add to the audio buffer timestamps to convert
* them to the composition time, in microseconds.
* @param isLastInSequence Whether this is the last item in the sequence.
*/
public void onMediaItemChanged(
EditedMediaItem editedMediaItem, long offsetToCompositionTimeUs, boolean isLastInSequence) {
currentEditedMediaItemInfo = new EditedMediaItemInfo(editedMediaItem, isLastInSequence);
this.offsetToCompositionTimeUs = offsetToCompositionTimeUs;
}
// AudioSink methods
@Override
public void configure(Format inputFormat, int specifiedBufferSize, @Nullable int[] outputChannels)
throws ConfigurationException {
checkArgument(supportsFormat(inputFormat));
EditedMediaItem editedMediaItem = checkStateNotNull(currentEditedMediaItemInfo).editedMediaItem;
// TODO(b/303029969): Evaluate throwing vs ignoring for null outputChannels.
checkArgument(outputChannels == null);
this.currentInputFormat = inputFormat;
if (outputGraphInput == null) {
try {
outputGraphInput = controller.getAudioGraphInput(editedMediaItem, currentInputFormat);
} catch (ExportException e) {
throw new ConfigurationException(e, currentInputFormat);
}
}
outputGraphInput.onMediaItemChanged(
editedMediaItem, editedMediaItem.durationUs, currentInputFormat, /* isLast= */ false);
}
@Override
public boolean isEnded() {
// If we are playing the last media item in the sequence, we must also check that the controller
// is ended.
return inputStreamEnded
&& (!checkStateNotNull(currentEditedMediaItemInfo).isLastInSequence
|| controller.isEnded());
}
@Override
public boolean handleBuffer(
ByteBuffer buffer, long presentationTimeUs, int encodedAccessUnitCount) {
checkState(!inputStreamEnded);
return handleBufferInternal(buffer, presentationTimeUs, /* flags= */ 0);
}
@Override
public void playToEndOfStream() {
inputStreamEnded = true;
// Queue end-of-stream only if playing the last media item in the sequence.
if (!signalledEndOfStream && checkStateNotNull(currentEditedMediaItemInfo).isLastInSequence) {
signalledEndOfStream =
handleBufferInternal(
EMPTY_BUFFER, C.TIME_END_OF_SOURCE, /* flags= */ C.BUFFER_FLAG_END_OF_STREAM);
}
}
@Override
public @SinkFormatSupport int getFormatSupport(Format format) {
if (Objects.equals(format.sampleMimeType, MimeTypes.AUDIO_RAW)
&& format.pcmEncoding == C.ENCODING_PCM_16BIT) {
return SINK_FORMAT_SUPPORTED_DIRECTLY;
}
return SINK_FORMAT_UNSUPPORTED;
}
@Override
public boolean supportsFormat(Format format) {
return getFormatSupport(format) == SINK_FORMAT_SUPPORTED_DIRECTLY;
}
@Override
public boolean hasPendingData() {
return false;
}
@Override
public long getCurrentPositionUs(boolean sourceEnded) {
long currentPositionUs = controller.getCurrentPositionUs();
if (currentPositionUs != CURRENT_POSITION_NOT_SET) {
      // Convert the composition time back to the position expected by the player.
currentPositionUs -= offsetToCompositionTimeUs;
}
return currentPositionUs;
}
@Override
public void play() {
controller.onPlay();
}
@Override
public void pause() {
controller.onPause();
}
@Override
public void flush() {
inputStreamEnded = false;
signalledEndOfStream = false;
}
@Override
public void reset() {
flush();
currentInputFormat = null;
currentEditedMediaItemInfo = null;
controller.onReset();
}
// Unsupported interface functionality.
@Override
public void setListener(AudioSink.Listener listener) {}
@Override
public void handleDiscontinuity() {}
@Override
public void setAudioAttributes(AudioAttributes audioAttributes) {}
@Nullable
@Override
public AudioAttributes getAudioAttributes() {
return null;
}
@Override
public void setPlaybackParameters(PlaybackParameters playbackParameters) {}
@Override
public PlaybackParameters getPlaybackParameters() {
return PlaybackParameters.DEFAULT;
}
@Override
public void enableTunnelingV21() {}
@Override
public void disableTunneling() {}
@Override
public void setSkipSilenceEnabled(boolean skipSilenceEnabled) {}
@Override
public boolean getSkipSilenceEnabled() {
return false;
}
@Override
public void setAudioSessionId(int audioSessionId) {}
@Override
public void setAuxEffectInfo(AuxEffectInfo auxEffectInfo) {}
@Override
public void setVolume(float volume) {}
// Internal methods
private boolean handleBufferInternal(ByteBuffer buffer, long presentationTimeUs, int flags) {
checkStateNotNull(currentInputFormat);
checkState(!signalledEndOfStream);
AudioGraphInput outputGraphInput = checkNotNull(this.outputGraphInput);
@Nullable DecoderInputBuffer outputBuffer = outputGraphInput.getInputBuffer();
if (outputBuffer == null) {
return false;
}
outputBuffer.ensureSpaceForWrite(buffer.remaining());
checkNotNull(outputBuffer.data).put(buffer).flip();
outputBuffer.timeUs =
presentationTimeUs == C.TIME_END_OF_SOURCE
? C.TIME_END_OF_SOURCE
: presentationTimeUs + offsetToCompositionTimeUs;
outputBuffer.setFlags(flags);
return outputGraphInput.queueInputBuffer();
}
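  /** Holds an {@link EditedMediaItem} and whether it is the last item in its sequence. */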
private static final class EditedMediaItemInfo {
public final EditedMediaItem editedMediaItem;
public final boolean isLastInSequence;
public EditedMediaItemInfo(EditedMediaItem editedMediaItem, boolean isLastInSequence) {
this.editedMediaItem = editedMediaItem;
this.isLastInSequence = isLastInSequence;
}
}
}

View File

@ -0,0 +1,955 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.usToMs;
import static java.lang.Math.min;
import android.content.Context;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Process;
import android.util.Pair;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import androidx.annotation.Nullable;
import androidx.annotation.RestrictTo;
import androidx.annotation.VisibleForTesting;
import androidx.media3.common.C;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.Player;
import androidx.media3.common.PreviewingVideoGraph;
import androidx.media3.common.SimpleBasePlayer;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoSize;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.effect.PreviewingSingleInputVideoGraph;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.RendererCapabilities;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.DefaultAudioSink;
import androidx.media3.exoplayer.image.ImageDecoder;
import androidx.media3.exoplayer.source.ConcatenatingMediaSource2;
import androidx.media3.exoplayer.source.DefaultMediaSourceFactory;
import androidx.media3.exoplayer.source.ExternalLoader;
import androidx.media3.exoplayer.source.MergingMediaSource;
import androidx.media3.exoplayer.source.SilenceMediaSource;
import androidx.media3.exoplayer.source.TrackGroupArray;
import androidx.media3.exoplayer.trackselection.DefaultTrackSelector;
import androidx.media3.exoplayer.trackselection.ExoTrackSelection;
import androidx.media3.exoplayer.util.EventLogger;
import androidx.media3.exoplayer.video.CompositingVideoSinkProvider;
import androidx.media3.exoplayer.video.VideoFrameReleaseControl;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.ArrayList;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* A {@link Player} implementation that plays {@linkplain Composition compositions} of media assets.
* The {@link Composition} specifies how the assets should be arranged, and the audio and video
* effects to apply to them.
*
* <p>CompositionPlayer instances must be accessed from a single application thread. For the vast
* majority of cases this should be the application's main thread. The thread on which a
* CompositionPlayer instance must be accessed can be explicitly specified by passing a {@link
* Looper} when creating the player. If no {@link Looper} is specified, then the {@link Looper} of
* the thread that the player is created on is used, or if that thread does not have a {@link
* Looper}, the {@link Looper} of the application's main thread is used. In all cases the {@link
* Looper} of the thread from which the player must be accessed can be queried using {@link
* #getApplicationLooper()}.
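 *
 * <p>A minimal usage sketch (the asset URI and duration below are illustrative; see the
 * composition demo app and the playback tests for complete examples):
 *
 * <pre>{@code
 * CompositionPlayer player = new CompositionPlayer.Builder(context).build();
 * player.setVideoSurfaceView(surfaceView);
 * EditedMediaItem editedMediaItem =
 *     new EditedMediaItem.Builder(MediaItem.fromUri("asset:///media/mp4/sample.mp4"))
 *         .setDurationUs(1_024_000L)
 *         .build();
 * player.setComposition(
 *     new Composition.Builder(new EditedMediaItemSequence(editedMediaItem)).build());
 * player.prepare();
 * player.play();
 * }</pre>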
*/
@UnstableApi
@RestrictTo(LIBRARY_GROUP)
public final class CompositionPlayer extends SimpleBasePlayer
implements CompositionPlayerInternal.Listener,
CompositingVideoSinkProvider.Listener,
SurfaceHolder.Callback {
/** A builder for {@link CompositionPlayer} instances. */
public static final class Builder {
private final Context context;
private @MonotonicNonNull Looper looper;
private @MonotonicNonNull AudioSink audioSink;
private @MonotonicNonNull ExternalLoader externalImageLoader;
private ImageDecoder.Factory imageDecoderFactory;
private Clock clock;
private PreviewingVideoGraph.@MonotonicNonNull Factory previewingVideoGraphFactory;
private boolean built;
/**
     * Creates an instance.
*
* @param context The application context.
*/
public Builder(Context context) {
this.context = context.getApplicationContext();
imageDecoderFactory = ImageDecoder.Factory.DEFAULT;
clock = Clock.DEFAULT;
}
/**
     * Sets the {@link Looper} from which the player can be accessed and on which {@link
     * Player.Listener} callbacks are dispatched.
*
* <p>By default, the builder uses the looper of the thread that calls {@link #build()}.
*
* @param looper The {@link Looper}.
* @return This builder, for convenience.
*/
@CanIgnoreReturnValue
public Builder setLooper(Looper looper) {
this.looper = looper;
return this;
}
/**
* Sets the {@link AudioSink} that will be used to play out audio.
*
* <p>By default, a {@link DefaultAudioSink} with its default configuration is used.
*
* @param audioSink The {@link AudioSink}.
* @return This builder, for convenience.
*/
@CanIgnoreReturnValue
public Builder setAudioSink(AudioSink audioSink) {
this.audioSink = audioSink;
return this;
}
/**
* Sets the {@link ExternalLoader} for loading image media items with MIME type set to {@link
* MimeTypes#APPLICATION_EXTERNALLY_LOADED_IMAGE}. When setting an external loader, also set an
* {@link ImageDecoder.Factory} with {@link #setImageDecoderFactory(ImageDecoder.Factory)}.
*
* <p>By default, the player will not be able to load images with media type of {@link
* androidx.media3.common.MimeTypes#APPLICATION_EXTERNALLY_LOADED_IMAGE}.
*
* @param externalImageLoader The {@link ExternalLoader}.
* @return This builder, for convenience.
* @see DefaultMediaSourceFactory#setExternalImageLoader(ExternalLoader)
*/
@CanIgnoreReturnValue
public Builder setExternalImageLoader(ExternalLoader externalImageLoader) {
this.externalImageLoader = externalImageLoader;
return this;
}
/**
* Sets an {@link ImageDecoder.Factory} that will create the {@link ImageDecoder} instances to
* decode images.
*
* <p>By default, {@link ImageDecoder.Factory#DEFAULT} is used.
*
* @param imageDecoderFactory The {@link ImageDecoder.Factory}.
* @return This builder, for convenience.
*/
@CanIgnoreReturnValue
public Builder setImageDecoderFactory(ImageDecoder.Factory imageDecoderFactory) {
this.imageDecoderFactory = imageDecoderFactory;
return this;
}
/**
* Sets the {@link Clock} that will be used by the player.
*
* <p>By default, {@link Clock#DEFAULT} is used.
*
* @param clock The {@link Clock}.
* @return This builder, for convenience.
*/
@VisibleForTesting
@CanIgnoreReturnValue
public Builder setClock(Clock clock) {
this.clock = clock;
return this;
}
/**
* Sets the {@link PreviewingVideoGraph.Factory} that will be used by the player.
*
* <p>By default, a {@link PreviewingSingleInputVideoGraph.Factory} is used.
*
* @param previewingVideoGraphFactory The {@link PreviewingVideoGraph.Factory}.
* @return This builder, for convenience.
*/
@VisibleForTesting
@CanIgnoreReturnValue
public Builder setPreviewingVideoGraphFactory(
PreviewingVideoGraph.Factory previewingVideoGraphFactory) {
this.previewingVideoGraphFactory = previewingVideoGraphFactory;
return this;
}
/**
* Builds the {@link CompositionPlayer} instance. Must be called at most once.
*
     * <p>If no {@link Looper} has been set with {@link #setLooper(Looper)}, then this method must
     * be called on a thread with a {@link Looper}, which becomes the thread from which the player
     * instance can be accessed and on which {@link Player.Listener} callbacks are dispatched.
*/
public CompositionPlayer build() {
checkState(!built);
if (looper == null) {
looper = checkStateNotNull(Looper.myLooper());
}
if (audioSink == null) {
audioSink = new DefaultAudioSink.Builder(context).build();
}
if (previewingVideoGraphFactory == null) {
previewingVideoGraphFactory = new PreviewingSingleInputVideoGraph.Factory();
}
CompositionPlayer compositionPlayer = new CompositionPlayer(this);
built = true;
return compositionPlayer;
}
}
private static final Commands AVAILABLE_COMMANDS =
new Commands.Builder()
.addAll(
COMMAND_PLAY_PAUSE,
COMMAND_PREPARE,
COMMAND_STOP,
COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM,
COMMAND_SEEK_BACK,
COMMAND_SEEK_FORWARD,
COMMAND_GET_CURRENT_MEDIA_ITEM,
COMMAND_GET_TIMELINE,
COMMAND_SET_VIDEO_SURFACE,
COMMAND_RELEASE)
.build();
private static final @Event int[] SUPPORTED_LISTENER_EVENTS =
new int[] {
EVENT_PLAYBACK_STATE_CHANGED,
EVENT_PLAY_WHEN_READY_CHANGED,
EVENT_PLAYER_ERROR,
EVENT_POSITION_DISCONTINUITY
};
private static final int MAX_SUPPORTED_SEQUENCES = 2;
private static final String TAG = "CompositionPlayer";
private final Context context;
private final Clock clock;
private final HandlerWrapper applicationHandler;
private final List<ExoPlayer> players;
private final AudioSink finalAudioSink;
@Nullable private final ExternalLoader externalImageLoader;
private final ImageDecoder.Factory imageDecoderFactory;
private final PreviewingVideoGraph.Factory previewingVideoGraphFactory;
private final HandlerWrapper compositionInternalListenerHandler;
private @MonotonicNonNull HandlerThread playbackThread;
private @MonotonicNonNull CompositionPlayerInternal compositionPlayerInternal;
private @MonotonicNonNull ImmutableList<MediaItemData> playlist;
private @MonotonicNonNull Composition composition;
private @MonotonicNonNull Size videoOutputSize;
private long compositionDurationUs;
private boolean playWhenReady;
private @PlayWhenReadyChangeReason int playWhenReadyChangeReason;
private boolean renderedFirstFrame;
@Nullable private Object videoOutput;
@Nullable private PlaybackException playbackException;
private @Player.State int playbackState;
@Nullable private SurfaceHolder surfaceHolder;
@Nullable private Surface displaySurface;
private CompositionPlayer(Builder builder) {
super(checkNotNull(builder.looper), builder.clock);
context = builder.context;
clock = builder.clock;
applicationHandler = clock.createHandler(builder.looper, /* callback= */ null);
finalAudioSink = checkNotNull(builder.audioSink);
externalImageLoader = builder.externalImageLoader;
imageDecoderFactory = builder.imageDecoderFactory;
previewingVideoGraphFactory = checkNotNull(builder.previewingVideoGraphFactory);
compositionInternalListenerHandler = clock.createHandler(builder.looper, /* callback= */ null);
players = new ArrayList<>();
compositionDurationUs = C.TIME_UNSET;
playbackState = STATE_IDLE;
}
/**
* Sets the {@link Composition} to play.
*
* <p>This method should only be called once.
*
* @param composition The {@link Composition} to play. Every {@link EditedMediaItem} in the {@link
* Composition} must have its {@link EditedMediaItem#durationUs} set.
*/
public void setComposition(Composition composition) {
verifyApplicationThread();
checkArgument(
!composition.sequences.isEmpty()
&& composition.sequences.size() <= MAX_SUPPORTED_SEQUENCES);
checkState(this.composition == null);
setCompositionInternal(composition);
if (videoOutput != null) {
if (videoOutput instanceof SurfaceHolder) {
setVideoSurfaceHolderInternal((SurfaceHolder) videoOutput);
} else if (videoOutput instanceof SurfaceView) {
SurfaceView surfaceView = (SurfaceView) videoOutput;
setVideoSurfaceHolderInternal(surfaceView.getHolder());
} else if (videoOutput instanceof Surface) {
setVideoSurfaceInternal((Surface) videoOutput, checkNotNull(videoOutputSize));
} else {
throw new IllegalStateException(videoOutput.getClass().toString());
}
}
// Update the composition field at the end after everything else has been set.
this.composition = composition;
}
/** Sets the {@link Surface} and {@link Size} to render to. */
@VisibleForTesting
public void setVideoSurface(Surface surface, Size videoOutputSize) {
videoOutput = surface;
this.videoOutputSize = videoOutputSize;
setVideoSurfaceInternal(surface, videoOutputSize);
}
// CompositingVideoSinkProvider.Listener methods. Called on playback thread.
@Override
public void onFirstFrameRendered(CompositingVideoSinkProvider compositingVideoSinkProvider) {
applicationHandler.post(
() -> {
CompositionPlayer.this.renderedFirstFrame = true;
invalidateState();
});
}
@Override
public void onFrameDropped(CompositingVideoSinkProvider compositingVideoSinkProvider) {
// Do not post to application thread on each dropped frame, because onFrameDropped
// may be called frequently when resources are already scarce.
}
@Override
public void onVideoSizeChanged(
CompositingVideoSinkProvider compositingVideoSinkProvider, VideoSize videoSize) {
// TODO: b/328219481 - Report video size change to app.
}
@Override
public void onError(
CompositingVideoSinkProvider compositingVideoSinkProvider,
VideoFrameProcessingException videoFrameProcessingException) {
    // The error will also be surfaced from the underlying ExoPlayer instance via
    // PlayerListener.onPlayerError, so it will arrive at the composition player twice.
applicationHandler.post(
() ->
maybeUpdatePlaybackError(
"error from video sink provider",
videoFrameProcessingException,
PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED));
}
// SurfaceHolder.Callback methods. Called on application thread.
@Override
public void surfaceCreated(SurfaceHolder holder) {
videoOutputSize = new Size(holder.getSurfaceFrame().width(), holder.getSurfaceFrame().height());
setVideoSurfaceInternal(holder.getSurface(), videoOutputSize);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
maybeSetOutputSurfaceInfo(width, height);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
clearVideoSurfaceInternal();
}
// SimpleBasePlayer methods
@Override
protected State getState() {
@Player.State int oldPlaybackState = playbackState;
updatePlaybackState();
if (oldPlaybackState != STATE_READY && playbackState == STATE_READY && playWhenReady) {
for (int i = 0; i < players.size(); i++) {
players.get(i).setPlayWhenReady(true);
}
} else if (oldPlaybackState == STATE_READY
&& playWhenReady
&& playbackState == STATE_BUFFERING) {
      // We were playing, but one of the players entered the buffering state, so pause all players.
for (int i = 0; i < players.size(); i++) {
players.get(i).setPlayWhenReady(false);
}
}
// TODO: b/328219481 - Report video size change to app.
State.Builder state =
new State.Builder()
.setAvailableCommands(AVAILABLE_COMMANDS)
.setPlaybackState(playbackState)
.setPlayerError(playbackException)
.setPlayWhenReady(playWhenReady, playWhenReadyChangeReason)
.setContentPositionMs(this::getContentPositionMs)
.setContentBufferedPositionMs(this::getBufferedPositionMs)
.setTotalBufferedDurationMs(this::getTotalBufferedDurationMs)
.setNewlyRenderedFirstFrame(getRenderedFirstFrameAndReset());
if (playlist != null) {
      // Update the playlist only after it has been set so that SimpleBasePlayer announces a
      // timeline change with reason TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED.
state.setPlaylist(playlist);
}
return state.build();
}
@Override
protected ListenableFuture<?> handlePrepare() {
checkStateNotNull(composition, "No composition set");
if (playbackState != Player.STATE_IDLE) {
// The player has been prepared already.
return Futures.immediateVoidFuture();
}
for (int i = 0; i < players.size(); i++) {
players.get(i).prepare();
}
return Futures.immediateVoidFuture();
}
@Override
protected ListenableFuture<?> handleSetPlayWhenReady(boolean playWhenReady) {
this.playWhenReady = playWhenReady;
playWhenReadyChangeReason = PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST;
if (playbackState == STATE_READY) {
for (int i = 0; i < players.size(); i++) {
players.get(i).setPlayWhenReady(playWhenReady);
}
} // else, wait until all players are ready.
return Futures.immediateVoidFuture();
}
@Override
protected ListenableFuture<?> handleStop() {
for (int i = 0; i < players.size(); i++) {
players.get(i).stop();
}
return Futures.immediateVoidFuture();
}
@Override
protected ListenableFuture<?> handleRelease() {
if (composition == null) {
return Futures.immediateVoidFuture();
}
checkState(checkStateNotNull(playbackThread).isAlive());
// Release the players first so that they stop rendering.
for (int i = 0; i < players.size(); i++) {
players.get(i).release();
}
checkStateNotNull(compositionPlayerInternal).release();
removeSurfaceCallbacks();
// Remove any queued callback from the internal player.
compositionInternalListenerHandler.removeCallbacksAndMessages(/* token= */ null);
displaySurface = null;
checkStateNotNull(playbackThread).quitSafely();
applicationHandler.removeCallbacksAndMessages(/* token= */ null);
return Futures.immediateVoidFuture();
}
@Override
protected ListenableFuture<?> handleClearVideoOutput(@Nullable Object videoOutput) {
checkArgument(Util.areEqual(videoOutput, this.videoOutput));
this.videoOutput = null;
if (composition == null) {
return Futures.immediateVoidFuture();
}
removeSurfaceCallbacks();
clearVideoSurfaceInternal();
return Futures.immediateVoidFuture();
}
@Override
protected ListenableFuture<?> handleSetVideoOutput(Object videoOutput) {
if (!(videoOutput instanceof SurfaceHolder || videoOutput instanceof SurfaceView)) {
throw new UnsupportedOperationException(videoOutput.getClass().toString());
}
this.videoOutput = videoOutput;
if (composition == null) {
return Futures.immediateVoidFuture();
}
if (videoOutput instanceof SurfaceHolder) {
setVideoSurfaceHolderInternal((SurfaceHolder) videoOutput);
} else {
setVideoSurfaceHolderInternal(((SurfaceView) videoOutput).getHolder());
}
return Futures.immediateVoidFuture();
}
@Override
protected ListenableFuture<?> handleSeek(int mediaItemIndex, long positionMs, int seekCommand) {
CompositionPlayerInternal compositionPlayerInternal =
checkStateNotNull(this.compositionPlayerInternal);
compositionPlayerInternal.startSeek(positionMs);
for (int i = 0; i < players.size(); i++) {
players.get(i).seekTo(positionMs);
}
compositionPlayerInternal.endSeek();
return Futures.immediateVoidFuture();
}
// CompositionPlayerInternal.Listener methods
@Override
public void onError(String message, Exception cause, int errorCode) {
maybeUpdatePlaybackError(message, cause, errorCode);
}
// Internal methods
private void updatePlaybackState() {
if (players.isEmpty() || playbackException != null) {
playbackState = STATE_IDLE;
return;
}
int idleCount = 0;
int bufferingCount = 0;
int endedCount = 0;
for (int i = 0; i < players.size(); i++) {
@Player.State int playbackState = players.get(i).getPlaybackState();
switch (playbackState) {
case STATE_IDLE:
idleCount++;
break;
case STATE_BUFFERING:
bufferingCount++;
break;
case STATE_READY:
// ignore
break;
case STATE_ENDED:
endedCount++;
break;
default:
throw new IllegalStateException(String.valueOf(playbackState));
}
}
if (idleCount > 0) {
playbackState = STATE_IDLE;
} else if (bufferingCount > 0) {
playbackState = STATE_BUFFERING;
} else if (endedCount == players.size()) {
playbackState = STATE_ENDED;
} else {
playbackState = STATE_READY;
}
}
@SuppressWarnings("VisibleForTests") // Calls ExoPlayer.Builder.setClock()
private void setCompositionInternal(Composition composition) {
compositionDurationUs = getCompositionDurationUs(composition);
playbackThread = new HandlerThread("CompositionPlaybackThread", Process.THREAD_PRIORITY_AUDIO);
playbackThread.start();
    // Create the audio and video composition components now in order to set up the audio and video
    // pipelines. Once this method returns, further access to the audio and video pipelines must be
    // done on the playback thread only, to ensure related components are accessed from one thread
    // only.
PreviewAudioPipeline previewAudioPipeline =
new PreviewAudioPipeline(
new DefaultAudioMixer.Factory(),
composition.effects.audioProcessors,
checkNotNull(finalAudioSink));
CompositingVideoSinkProvider compositingVideoSinkProvider =
new CompositingVideoSinkProvider.Builder(context)
.setPreviewingVideoGraphFactory(checkNotNull(previewingVideoGraphFactory))
.build();
compositingVideoSinkProvider.setVideoFrameReleaseControl(
new VideoFrameReleaseControl(
context, new CompositionFrameTimingEvaluator(), /* allowedJoiningTimeMs= */ 0));
compositingVideoSinkProvider.addListener(this);
for (int i = 0; i < composition.sequences.size(); i++) {
EditedMediaItemSequence editedMediaItemSequence = composition.sequences.get(i);
SequencePlayerRenderersWrapper playerRenderersWrapper =
i == 0
? SequencePlayerRenderersWrapper.create(
context,
editedMediaItemSequence,
previewAudioPipeline,
compositingVideoSinkProvider,
imageDecoderFactory)
: SequencePlayerRenderersWrapper.createForAudio(
context, editedMediaItemSequence, previewAudioPipeline);
ExoPlayer.Builder playerBuilder =
new ExoPlayer.Builder(context)
.setLooper(getApplicationLooper())
.setPlaybackLooper(playbackThread.getLooper())
.setRenderersFactory(playerRenderersWrapper)
.setHandleAudioBecomingNoisy(true)
.setClock(clock);
if (i == 0) {
playerBuilder.setTrackSelector(new CompositionTrackSelector(context));
}
ExoPlayer player = playerBuilder.build();
player.addListener(new PlayerListener(i));
player.addAnalyticsListener(new EventLogger());
setPlayerSequence(player, editedMediaItemSequence, /* shouldGenerateSilence= */ i == 0);
players.add(player);
if (i == 0) {
// Invalidate the player state before initializing the playlist to force SimpleBasePlayer
// to collect a state while the playlist is null. Consequently, once the playlist is
// initialized, SimpleBasePlayer will raise a timeline change callback with reason
// TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED.
invalidateState();
playlist = createPlaylist();
}
}
    // From here on, the composition player accesses the audio and video pipelines via the internal
    // player. The internal player ensures access to the components is done on the playback thread.
compositionPlayerInternal =
new CompositionPlayerInternal(
playbackThread.getLooper(),
clock,
previewAudioPipeline,
compositingVideoSinkProvider,
/* listener= */ this,
compositionInternalListenerHandler);
}
/** Sets the {@linkplain EditedMediaItemSequence sequence} to be played by the player. */
private void setPlayerSequence(
ExoPlayer player, EditedMediaItemSequence sequence, boolean shouldGenerateSilence) {
ConcatenatingMediaSource2.Builder mediaSourceBuilder =
new ConcatenatingMediaSource2.Builder().useDefaultMediaSourceFactory(context);
for (int i = 0; i < sequence.editedMediaItems.size(); i++) {
EditedMediaItem editedMediaItem = sequence.editedMediaItems.get(i);
checkArgument(editedMediaItem.durationUs != C.TIME_UNSET);
long durationUs = editedMediaItem.getPresentationDurationUs();
if (shouldGenerateSilence) {
DefaultMediaSourceFactory defaultMediaSourceFactory =
new DefaultMediaSourceFactory(context);
if (externalImageLoader != null) {
defaultMediaSourceFactory.setExternalImageLoader(externalImageLoader);
}
mediaSourceBuilder.add(
new MergingMediaSource(
defaultMediaSourceFactory.createMediaSource(editedMediaItem.mediaItem),
                // Generate silence for the full duration of the MediaItem before clipping, because
                // the actual media track starts at the clipped position. For example, if a video is
                // 1000ms long and clipped 900ms from the start, its MediaSource will be enabled at
                // 900ms during track selection, rather than at 0ms.
new SilenceMediaSource(editedMediaItem.durationUs)),
/* initialPlaceholderDurationMs= */ usToMs(durationUs));
} else {
mediaSourceBuilder.add(
editedMediaItem.mediaItem, /* initialPlaceholderDurationMs= */ usToMs(durationUs));
}
}
player.setMediaSource(mediaSourceBuilder.build());
}
private long getContentPositionMs() {
return players.isEmpty() ? C.TIME_UNSET : players.get(0).getContentPosition();
}
private long getBufferedPositionMs() {
if (players.isEmpty()) {
return 0;
}
// Return the minimum buffered position among players.
long minBufferedPositionMs = Integer.MAX_VALUE;
for (int i = 0; i < players.size(); i++) {
minBufferedPositionMs = min(minBufferedPositionMs, players.get(i).getBufferedPosition());
}
return minBufferedPositionMs;
}
private long getTotalBufferedDurationMs() {
if (players.isEmpty()) {
return 0;
}
// Return the minimum total buffered duration among players.
long minTotalBufferedDurationMs = Integer.MAX_VALUE;
for (int i = 0; i < players.size(); i++) {
minTotalBufferedDurationMs =
min(minTotalBufferedDurationMs, players.get(i).getTotalBufferedDuration());
}
return minTotalBufferedDurationMs;
}
private boolean getRenderedFirstFrameAndReset() {
boolean value = renderedFirstFrame;
renderedFirstFrame = false;
return value;
}
private void maybeUpdatePlaybackError(
String errorMessage, Exception cause, @PlaybackException.ErrorCode int errorCode) {
if (playbackException == null) {
playbackException = new PlaybackException(errorMessage, cause, errorCode);
for (int i = 0; i < players.size(); i++) {
players.get(i).stop();
}
invalidateState();
} else {
Log.w(TAG, errorMessage, cause);
}
}
private void setVideoSurfaceHolderInternal(SurfaceHolder surfaceHolder) {
removeSurfaceCallbacks();
this.surfaceHolder = surfaceHolder;
surfaceHolder.addCallback(this);
Surface surface = surfaceHolder.getSurface();
if (surface != null && surface.isValid()) {
videoOutputSize =
new Size(
surfaceHolder.getSurfaceFrame().width(), surfaceHolder.getSurfaceFrame().height());
setVideoSurfaceInternal(surface, videoOutputSize);
} else {
clearVideoSurfaceInternal();
}
}
private void setVideoSurfaceInternal(Surface surface, Size videoOutputSize) {
displaySurface = surface;
maybeSetOutputSurfaceInfo(videoOutputSize.getWidth(), videoOutputSize.getHeight());
}
private void maybeSetOutputSurfaceInfo(int width, int height) {
Surface surface = displaySurface;
if (width == 0 || height == 0 || surface == null || compositionPlayerInternal == null) {
return;
}
compositionPlayerInternal.setOutputSurfaceInfo(surface, new Size(width, height));
}
private void clearVideoSurfaceInternal() {
displaySurface = null;
if (compositionPlayerInternal != null) {
compositionPlayerInternal.clearOutputSurface();
}
}
private void removeSurfaceCallbacks() {
if (surfaceHolder != null) {
surfaceHolder.removeCallback(this);
surfaceHolder = null;
}
}
private ImmutableList<MediaItemData> createPlaylist() {
    checkState(compositionDurationUs != C.TIME_UNSET);
return ImmutableList.of(
new MediaItemData.Builder("CompositionTimeline")
.setMediaItem(MediaItem.EMPTY)
.setDurationUs(compositionDurationUs)
.build());
}
private static long getCompositionDurationUs(Composition composition) {
checkState(!composition.sequences.isEmpty());
long compositionDurationUs = getSequenceDurationUs(composition.sequences.get(0));
for (int i = 0; i < composition.sequences.size(); i++) {
long sequenceDurationUs = getSequenceDurationUs(composition.sequences.get(i));
checkArgument(
compositionDurationUs == sequenceDurationUs,
Util.formatInvariant(
"Non-matching sequence durations. First sequence duration: %d us, sequence [%d]"
+ " duration: %d us",
compositionDurationUs, i, sequenceDurationUs));
}
return compositionDurationUs;
}
private static long getSequenceDurationUs(EditedMediaItemSequence sequence) {
long compositionDurationUs = 0;
for (int i = 0; i < sequence.editedMediaItems.size(); i++) {
compositionDurationUs += sequence.editedMediaItems.get(i).getPresentationDurationUs();
}
checkState(compositionDurationUs > 0, String.valueOf(compositionDurationUs));
return compositionDurationUs;
}
/**
* A {@link VideoFrameReleaseControl.FrameTimingEvaluator} for composition frames.
*
* <ul>
* <li>Signals to {@linkplain
* VideoFrameReleaseControl.FrameTimingEvaluator#shouldForceReleaseFrame(long, long) force
* release} a frame if the frame is late by more than {@link #FRAME_LATE_THRESHOLD_US} and
* the elapsed time since the previous frame release is greater than {@link
* #FRAME_RELEASE_THRESHOLD_US}.
* <li>Signals to {@linkplain
* VideoFrameReleaseControl.FrameTimingEvaluator#shouldDropFrame(long, long, boolean) drop a
* frame} if the frame is late by more than {@link #FRAME_LATE_THRESHOLD_US} and the frame
* is not marked as the last one.
* <li>Signals to never {@linkplain
* VideoFrameReleaseControl.FrameTimingEvaluator#shouldIgnoreFrame(long, long, long,
* boolean, boolean) ignore} a frame.
* </ul>
*/
private static final class CompositionFrameTimingEvaluator
implements VideoFrameReleaseControl.FrameTimingEvaluator {
/** The time threshold, in microseconds, after which a frame is considered late. */
private static final long FRAME_LATE_THRESHOLD_US = -30_000;
/**
* The maximum elapsed time threshold, in microseconds, since last releasing a frame after which
* a frame can be force released.
*/
private static final long FRAME_RELEASE_THRESHOLD_US = 100_000;
@Override
public boolean shouldForceReleaseFrame(long earlyUs, long elapsedSinceLastReleaseUs) {
return earlyUs < FRAME_LATE_THRESHOLD_US
&& elapsedSinceLastReleaseUs > FRAME_RELEASE_THRESHOLD_US;
}
@Override
public boolean shouldDropFrame(long earlyUs, long elapsedRealtimeUs, boolean isLastFrame) {
return earlyUs < FRAME_LATE_THRESHOLD_US && !isLastFrame;
}
@Override
public boolean shouldIgnoreFrame(
long earlyUs,
long positionUs,
long elapsedRealtimeUs,
boolean isLastFrame,
boolean treatDroppedBuffersAsSkipped) {
// TODO: b/293873191 - Handle very late buffers and drop to key frame.
return false;
}
}
private final class PlayerListener implements Player.Listener {
private final int playerIndex;
public PlayerListener(int playerIndex) {
this.playerIndex = playerIndex;
}
@Override
public void onEvents(Player player, Events events) {
if (events.containsAny(SUPPORTED_LISTENER_EVENTS)) {
invalidateState();
}
}
@Override
public void onPlayWhenReadyChanged(boolean playWhenReady, int reason) {
playWhenReadyChangeReason = reason;
}
@Override
public void onPlayerError(PlaybackException error) {
maybeUpdatePlaybackError("error from player " + playerIndex, error, error.errorCode);
}
}
/**
* A {@link DefaultTrackSelector} extension to de-select generated audio when the audio from the
* media is playable.
*/
private static final class CompositionTrackSelector extends DefaultTrackSelector {
private static final String SILENCE_AUDIO_TRACK_GROUP_ID = "1:";
public CompositionTrackSelector(Context context) {
super(context);
}
@Nullable
@Override
protected Pair<ExoTrackSelection.Definition, Integer> selectAudioTrack(
MappedTrackInfo mappedTrackInfo,
@RendererCapabilities.Capabilities int[][][] rendererFormatSupports,
@RendererCapabilities.AdaptiveSupport int[] rendererMixedMimeTypeAdaptationSupports,
Parameters params)
throws ExoPlaybackException {
int audioRenderIndex = C.INDEX_UNSET;
for (int i = 0; i < mappedTrackInfo.getRendererCount(); i++) {
if (mappedTrackInfo.getRendererType(i) == C.TRACK_TYPE_AUDIO) {
audioRenderIndex = i;
break;
}
}
checkState(audioRenderIndex != C.INDEX_UNSET);
TrackGroupArray audioTrackGroups = mappedTrackInfo.getTrackGroups(audioRenderIndex);
// If there's only one audio TrackGroup, it'll be silence, so there's no need to override track
// selection.
if (audioTrackGroups.length > 1) {
boolean mediaAudioIsPlayable = false;
int silenceAudioTrackGroupIndex = C.INDEX_UNSET;
for (int i = 0; i < audioTrackGroups.length; i++) {
if (audioTrackGroups.get(i).id.startsWith(SILENCE_AUDIO_TRACK_GROUP_ID)) {
silenceAudioTrackGroupIndex = i;
continue;
}
// For non-silence tracks
for (int j = 0; j < audioTrackGroups.get(i).length; j++) {
mediaAudioIsPlayable |=
RendererCapabilities.getFormatSupport(
rendererFormatSupports[audioRenderIndex][i][j])
== C.FORMAT_HANDLED;
}
}
checkState(silenceAudioTrackGroupIndex != C.INDEX_UNSET);
if (mediaAudioIsPlayable) {
// Disable silence if the media's audio track is playable.
int silenceAudioTrackIndex = audioTrackGroups.length - 1;
rendererFormatSupports[audioRenderIndex][silenceAudioTrackIndex][0] =
RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE);
}
}
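// Summary of the block above (added comment, not part of the original change): when the media
// audio is decodable, the silence track's format support is overridden to
// C.FORMAT_UNSUPPORTED_TYPE so that the default selection below picks the media audio; otherwise
// the silence track remains selectable.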
return super.selectAudioTrack(
mappedTrackInfo, rendererFormatSupports, rendererMixedMimeTypeAdaptationSupports, params);
}
}
}


@ -0,0 +1,237 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkState;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.view.Surface;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.Size;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.video.CompositingVideoSinkProvider;
/** Provides access to the composition preview audio and video components on the playback thread. */
/* package */ final class CompositionPlayerInternal implements Handler.Callback {
/** A listener for events. */
public interface Listener {
/**
* Called when an error occurs.
*
* @param message The error message.
* @param cause The error cause.
* @param errorCode The error code.
*/
void onError(String message, Exception cause, @PlaybackException.ErrorCode int errorCode);
}
private static final String TAG = "CompPlayerInternal";
private static final int MSG_SET_OUTPUT_SURFACE_INFO = 0;
private static final int MSG_CLEAR_OUTPUT_SURFACE = 1;
private static final int MSG_START_SEEK = 2;
private static final int MSG_END_SEEK = 3;
private static final int MSG_RELEASE = 4;
private final Clock clock;
private final HandlerWrapper handler;
/** Must be accessed on the playback thread only. */
private final PreviewAudioPipeline previewAudioPipeline;
/** Must be accessed on the playback thread only. */
private final CompositingVideoSinkProvider compositingVideoSinkProvider;
private final Listener listener;
private final HandlerWrapper listenerHandler;
private boolean released;
/**
* Creates an instance.
*
* @param playbackLooper The playback thread {@link Looper}.
* @param clock The {@link Clock} used.
* @param previewAudioPipeline The {@link PreviewAudioPipeline}.
* @param compositingVideoSinkProvider The {@link CompositingVideoSinkProvider}.
* @param listener A {@link Listener} to send callbacks back to the player.
* @param listenerHandler A {@link HandlerWrapper} to dispatch {@link Listener} callbacks.
*/
public CompositionPlayerInternal(
Looper playbackLooper,
Clock clock,
PreviewAudioPipeline previewAudioPipeline,
CompositingVideoSinkProvider compositingVideoSinkProvider,
Listener listener,
HandlerWrapper listenerHandler) {
this.clock = clock;
this.handler = clock.createHandler(playbackLooper, /* callback= */ this);
this.previewAudioPipeline = previewAudioPipeline;
this.compositingVideoSinkProvider = compositingVideoSinkProvider;
this.listener = listener;
this.listenerHandler = listenerHandler;
}
// Public methods
/** Sets the output surface information on the video pipeline. */
public void setOutputSurfaceInfo(Surface surface, Size size) {
handler
.obtainMessage(MSG_SET_OUTPUT_SURFACE_INFO, new OutputSurfaceInfo(surface, size))
.sendToTarget();
}
/** Clears the output surface from the video pipeline. */
public void clearOutputSurface() {
handler.obtainMessage(MSG_CLEAR_OUTPUT_SURFACE).sendToTarget();
}
public void startSeek(long positionMs) {
handler.obtainMessage(MSG_START_SEEK, positionMs).sendToTarget();
}
public void endSeek() {
handler.obtainMessage(MSG_END_SEEK).sendToTarget();
}
/**
* Releases internal components on the playback thread and blocks the current thread until the
* components are released.
*/
public void release() {
checkState(!released);
// Set released to true now to silence any pending listener callback.
released = true;
ConditionVariable conditionVariable = new ConditionVariable();
handler.obtainMessage(MSG_RELEASE, conditionVariable).sendToTarget();
clock.onThreadBlocked();
try {
conditionVariable.block();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IllegalStateException(e);
}
}
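// Note on the release handshake (added comment, not part of the original change): MSG_RELEASE is
// handled on the playback thread by releaseInternal(), which opens the ConditionVariable in a
// finally block, so release() always unblocks even if releasing the audio pipeline or the video
// sink provider throws.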
// Handler.Callback methods
@Override
public boolean handleMessage(Message message) {
try {
switch (message.what) {
case MSG_SET_OUTPUT_SURFACE_INFO:
setOutputSurfaceInfoOnInternalThread(
/* outputSurfaceInfo= */ (OutputSurfaceInfo) message.obj);
break;
case MSG_CLEAR_OUTPUT_SURFACE:
clearOutputSurfaceInternal();
break;
case MSG_START_SEEK:
// Video seeking is currently handled by the video renderers, specifically in
// onPositionReset.
previewAudioPipeline.startSeek(/* positionUs= */ Util.msToUs((long) message.obj));
break;
case MSG_END_SEEK:
previewAudioPipeline.endSeek();
break;
case MSG_RELEASE:
releaseInternal(/* conditionVariable= */ (ConditionVariable) message.obj);
break;
default:
maybeRaiseError(
/* message= */ "Unknown message",
new IllegalStateException(String.valueOf(message.what)),
/* errorCode= */ PlaybackException.ERROR_CODE_UNSPECIFIED);
}
} catch (RuntimeException e) {
maybeRaiseError(
/* message= */ "Unknown error",
e,
/* errorCode= */ PlaybackException.ERROR_CODE_UNSPECIFIED);
}
return true;
}
// Internal methods
private void releaseInternal(ConditionVariable conditionVariable) {
try {
previewAudioPipeline.release();
compositingVideoSinkProvider.clearOutputSurfaceInfo();
compositingVideoSinkProvider.release();
} catch (RuntimeException e) {
Log.e(TAG, "error while releasing the player", e);
} finally {
conditionVariable.open();
}
}
private void clearOutputSurfaceInternal() {
try {
compositingVideoSinkProvider.clearOutputSurfaceInfo();
} catch (RuntimeException e) {
maybeRaiseError(
/* message= */ "error clearing video output",
e,
/* errorCode= */ PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED);
}
}
private void setOutputSurfaceInfoOnInternalThread(OutputSurfaceInfo outputSurfaceInfo) {
try {
compositingVideoSinkProvider.setOutputSurfaceInfo(
outputSurfaceInfo.surface, outputSurfaceInfo.size);
} catch (RuntimeException e) {
maybeRaiseError(
/* message= */ "error setting surface view",
e,
/* errorCode= */ PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSING_FAILED);
}
}
private void maybeRaiseError(
String message, Exception cause, @PlaybackException.ErrorCode int errorCode) {
try {
listenerHandler.post(
() -> {
// This code runs on the application thread, hence access to the `released` field does
// not need to be synchronized.
if (!released) {
listener.onError(message, cause, errorCode);
}
});
} catch (RuntimeException e) {
Log.e(TAG, "error", e);
}
}
private static final class OutputSurfaceInfo {
public final Surface surface;
public final Size size;
public OutputSurfaceInfo(Surface surface, Size size) {
this.surface = surface;
this.size = size;
}
}
}


@ -0,0 +1,211 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.util.Util.sampleCountToDurationUs;
import androidx.media3.common.Format;
import androidx.media3.common.audio.AudioProcessor;
import androidx.media3.common.audio.AudioProcessor.AudioFormat;
import androidx.media3.common.util.Util;
import androidx.media3.exoplayer.audio.AudioSink;
import com.google.common.collect.ImmutableList;
import java.nio.ByteBuffer;
import java.util.Objects;
/**
* Processes input from {@link AudioGraphInputAudioSink} instances, plumbing the data through an
* {@link AudioGraph} and writing the output to the provided {@link AudioSink}.
*
* <p>Multiple streams of {@linkplain #createInput() input} are not currently supported.
*/
/* package */ final class PreviewAudioPipeline {
private final AudioSink finalAudioSink;
private final AudioGraph audioGraph;
private int audioGraphInputsCreated;
private int inputAudioSinksCreated;
private int inputAudioSinksPlaying;
private AudioFormat outputAudioFormat;
private long outputFramesWritten;
private long seekPositionUs;
/**
* Creates an instance.
*
* @param mixerFactory The {@linkplain AudioMixer.Factory factory} used to {@linkplain
* AudioMixer.Factory#create() create} the underlying {@link AudioMixer}.
* @param effects The composition-level audio effects that are applied after mixing.
* @param finalAudioSink The {@linkplain AudioSink sink} for processed output audio.
*/
public PreviewAudioPipeline(
AudioMixer.Factory mixerFactory,
ImmutableList<AudioProcessor> effects,
AudioSink finalAudioSink) {
audioGraph = new AudioGraph(mixerFactory, effects);
this.finalAudioSink = finalAudioSink;
outputAudioFormat = AudioFormat.NOT_SET;
}
/** Releases any underlying resources. */
public void release() {
audioGraph.reset();
finalAudioSink.reset();
finalAudioSink.release();
audioGraphInputsCreated = 0;
inputAudioSinksCreated = 0;
inputAudioSinksPlaying = 0;
}
/** Returns an {@link AudioSink} for a single sequence of non-overlapping raw PCM audio. */
public AudioGraphInputAudioSink createInput() {
return new AudioGraphInputAudioSink(new SinkController());
}
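// Illustrative usage (added sketch, mirroring SequencePlayerRenderersWrapper in this change): each
// audio renderer in a sequence player is built around one input sink created here:
//
//   AudioGraphInputAudioSink sequenceAudioSink = previewAudioPipeline.createInput();
//   // The sink is then passed to the SequenceAudioRenderer handling one EditedMediaItemSequence.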
/**
* Processes data through the underlying components.
*
* @return Whether more data can be processed by immediately calling this method again.
*/
public boolean processData()
throws ExportException,
AudioSink.WriteException,
AudioSink.InitializationException,
AudioSink.ConfigurationException {
// Do not process any data until the input audio sinks have created audio graph inputs.
if (inputAudioSinksCreated == 0 || inputAudioSinksCreated != audioGraphInputsCreated) {
return false;
}
if (Objects.equals(outputAudioFormat, AudioFormat.NOT_SET)) {
AudioFormat audioGraphAudioFormat = audioGraph.getOutputAudioFormat();
if (Objects.equals(audioGraphAudioFormat, AudioFormat.NOT_SET)) {
return false;
}
finalAudioSink.configure(
Util.getPcmFormat(audioGraphAudioFormat),
/* specifiedBufferSize= */ 0,
/* outputChannels= */ null);
outputAudioFormat = audioGraphAudioFormat;
}
if (audioGraph.isEnded()) {
if (finalAudioSink.isEnded()) {
return false;
}
finalAudioSink.playToEndOfStream();
return false;
}
ByteBuffer audioBuffer = audioGraph.getOutput();
if (!audioBuffer.hasRemaining()) {
return false;
}
int bytesToWrite = audioBuffer.remaining();
boolean bufferHandled =
finalAudioSink.handleBuffer(
audioBuffer, getBufferPresentationTimeUs(), /* encodedAccessUnitCount= */ 1);
outputFramesWritten +=
(bytesToWrite - audioBuffer.remaining()) / outputAudioFormat.bytesPerFrame;
return bufferHandled;
}
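// Illustrative usage (added sketch, mirroring SequenceAudioRenderer.render() in this change):
// callers drain the pipeline by invoking processData() until it returns false:
//
//   while (previewAudioPipeline.processData()) {}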
private long getBufferPresentationTimeUs() {
return seekPositionUs
+ sampleCountToDurationUs(outputFramesWritten, outputAudioFormat.sampleRate);
}
/**
* Handles the steps that need to be executed for a seek before seeking the upstream players.
*
* @param positionUs The seek position, in microseconds.
*/
public void startSeek(long positionUs) {
finalAudioSink.pause();
audioGraph.blockInput();
audioGraph.setPendingStartTimeUs(positionUs);
audioGraph.flush();
finalAudioSink.flush();
outputFramesWritten = 0;
seekPositionUs = positionUs;
}
/** Handles the steps that need to be executed for a seek after seeking the upstream players. */
public void endSeek() {
audioGraph.unblockInput();
}
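// Illustrative call order (added sketch, based on the javadoc above and on how
// CompositionPlayerInternal dispatches MSG_START_SEEK and MSG_END_SEEK in this change):
//
//   previewAudioPipeline.startSeek(/* positionUs= */ 500_000);
//   // ... seek the upstream players ...
//   previewAudioPipeline.endSeek();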
private final class SinkController implements AudioGraphInputAudioSink.Controller {
private boolean playing;
public SinkController() {
inputAudioSinksCreated++;
}
@Override
public AudioGraphInput getAudioGraphInput(EditedMediaItem editedMediaItem, Format format)
throws ExportException {
AudioGraphInput audioGraphInput = audioGraph.registerInput(editedMediaItem, format);
audioGraphInputsCreated++;
return audioGraphInput;
}
@Override
public long getCurrentPositionUs() {
return finalAudioSink.getCurrentPositionUs(/* sourceEnded= */ false);
}
@Override
public boolean isEnded() {
return finalAudioSink.isEnded();
}
@Override
public void onPlay() {
if (playing) {
return;
}
playing = true;
inputAudioSinksPlaying++;
if (inputAudioSinksCreated == inputAudioSinksPlaying) {
finalAudioSink.play();
}
}
@Override
public void onPause() {
if (!playing) {
return;
}
playing = false;
if (inputAudioSinksCreated == inputAudioSinksPlaying) {
finalAudioSink.pause();
}
inputAudioSinksPlaying--;
}
@Override
public void onReset() {
onPause();
}
}
}


@ -0,0 +1,433 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.exoplayer.DefaultRenderersFactory.DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS;
import static androidx.media3.exoplayer.DefaultRenderersFactory.MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.Handler;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.Effect;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.util.ConstantRateTimestampIterator;
import androidx.media3.exoplayer.ExoPlaybackException;
import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.RenderersFactory;
import androidx.media3.exoplayer.audio.AudioRendererEventListener;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.MediaCodecAudioRenderer;
import androidx.media3.exoplayer.image.ImageDecoder;
import androidx.media3.exoplayer.image.ImageOutput;
import androidx.media3.exoplayer.image.ImageRenderer;
import androidx.media3.exoplayer.mediacodec.MediaCodecAdapter;
import androidx.media3.exoplayer.mediacodec.MediaCodecSelector;
import androidx.media3.exoplayer.metadata.MetadataOutput;
import androidx.media3.exoplayer.source.MediaSource;
import androidx.media3.exoplayer.text.TextOutput;
import androidx.media3.exoplayer.video.CompositingVideoSinkProvider;
import androidx.media3.exoplayer.video.MediaCodecVideoRenderer;
import androidx.media3.exoplayer.video.VideoFrameReleaseControl;
import androidx.media3.exoplayer.video.VideoRendererEventListener;
import androidx.media3.exoplayer.video.VideoSink;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import java.util.ArrayList;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Wraps {@link EditedMediaItemSequence} specific rendering logic and state. */
/* package */ final class SequencePlayerRenderersWrapper implements RenderersFactory {
private static final int DEFAULT_FRAME_RATE = 30;
private final Context context;
private final EditedMediaItemSequence sequence;
private final PreviewAudioPipeline previewAudioPipeline;
@Nullable private final CompositingVideoSinkProvider compositingVideoSinkProvider;
@Nullable private final ImageDecoder.Factory imageDecoderFactory;
/** Creates a renderers wrapper for a player that will play video, image and audio. */
public static SequencePlayerRenderersWrapper create(
Context context,
EditedMediaItemSequence sequence,
PreviewAudioPipeline previewAudioPipeline,
CompositingVideoSinkProvider compositingVideoSinkProvider,
ImageDecoder.Factory imageDecoderFactory) {
return new SequencePlayerRenderersWrapper(
context, sequence, previewAudioPipeline, compositingVideoSinkProvider, imageDecoderFactory);
}
/** Creates a renderers wrapper for a player that will only play audio. */
public static SequencePlayerRenderersWrapper createForAudio(
Context context,
EditedMediaItemSequence sequence,
PreviewAudioPipeline previewAudioPipeline) {
return new SequencePlayerRenderersWrapper(
context,
sequence,
previewAudioPipeline,
/* compositingVideoSinkProvider= */ null,
/* imageDecoderFactory= */ null);
}
private SequencePlayerRenderersWrapper(
Context context,
EditedMediaItemSequence sequence,
PreviewAudioPipeline previewAudioPipeline,
@Nullable CompositingVideoSinkProvider compositingVideoSinkProvider,
@Nullable ImageDecoder.Factory imageDecoderFactory) {
this.context = context;
this.sequence = sequence;
this.previewAudioPipeline = previewAudioPipeline;
this.compositingVideoSinkProvider = compositingVideoSinkProvider;
this.imageDecoderFactory = imageDecoderFactory;
}
@Override
public Renderer[] createRenderers(
Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
AudioRendererEventListener audioRendererEventListener,
TextOutput textRendererOutput,
MetadataOutput metadataRendererOutput) {
List<Renderer> renderers = new ArrayList<>();
renderers.add(
new SequenceAudioRenderer(
context,
/* sequencePlayerRenderersWrapper= */ this,
eventHandler,
audioRendererEventListener,
previewAudioPipeline.createInput()));
if (compositingVideoSinkProvider != null) {
renderers.add(
new SequenceVideoRenderer(
checkStateNotNull(context),
eventHandler,
videoRendererEventListener,
/* sequencePlayerRenderersWrapper= */ this));
renderers.add(new SequenceImageRenderer(/* sequencePlayerRenderersWrapper= */ this));
}
return renderers.toArray(new Renderer[0]);
}
private static final class SequenceAudioRenderer extends MediaCodecAudioRenderer {
private final SequencePlayerRenderersWrapper sequencePlayerRenderersWrapper;
private final AudioGraphInputAudioSink audioSink;
@Nullable private EditedMediaItem pendingEditedMediaItem;
private long pendingOffsetToCompositionTimeUs;
// TODO - b/320007703: Revisit the abstractions needed here (editedMediaItemProvider and
// Supplier<EditedMediaItem>) once we finish all the wiring to support multiple sequences.
public SequenceAudioRenderer(
Context context,
SequencePlayerRenderersWrapper sequencePlayerRenderersWrapper,
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
AudioGraphInputAudioSink audioSink) {
super(context, MediaCodecSelector.DEFAULT, eventHandler, eventListener, audioSink);
this.sequencePlayerRenderersWrapper = sequencePlayerRenderersWrapper;
this.audioSink = audioSink;
}
// MediaCodecAudioRenderer methods
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
super.render(positionUs, elapsedRealtimeUs);
try {
while (sequencePlayerRenderersWrapper.previewAudioPipeline.processData()) {}
} catch (ExportException
| AudioSink.WriteException
| AudioSink.InitializationException
| AudioSink.ConfigurationException e) {
throw createRendererException(
e, /* format= */ null, ExoPlaybackException.ERROR_CODE_AUDIO_TRACK_WRITE_FAILED);
}
}
@Override
protected void onStreamChanged(
Format[] formats,
long startPositionUs,
long offsetUs,
MediaSource.MediaPeriodId mediaPeriodId)
throws ExoPlaybackException {
checkState(getTimeline().getWindowCount() == 1);
int mediaItemIndex = getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid);
// We must first update the pending media item state before calling super.onStreamChanged()
// because the super method will call onProcessedStreamChange().
pendingEditedMediaItem =
sequencePlayerRenderersWrapper.sequence.editedMediaItems.get(mediaItemIndex);
// Reverse engineer how timestamps and offsets are computed with a ConcatenatingMediaSource2
// to compute an offset converting buffer timestamps to composition timestamps.
// startPositionUs is not used because it is equal to offsetUs + clipping start time + seek
// position when seeking from any MediaItem in the playlist to the first MediaItem.
// TODO(b/331547894): remove this reverse-engineered logic by moving away from using a
// ConcatenatingMediaSource2.
// The offset to convert the sample timestamps to composition time is negative because we need
// to remove the large offset added by ExoPlayer to make sure the decoder doesn't receive any
// negative timestamps. We also need to remove the clipping start position.
pendingOffsetToCompositionTimeUs = -offsetUs;
if (mediaItemIndex == 0) {
pendingOffsetToCompositionTimeUs -=
pendingEditedMediaItem.mediaItem.clippingConfiguration.startPositionUs;
}
for (int i = 0; i < mediaItemIndex; i++) {
pendingOffsetToCompositionTimeUs +=
sequencePlayerRenderersWrapper
.sequence
.editedMediaItems
.get(i)
.getPresentationDurationUs();
}
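// Worked example (added comment, numbers are hypothetical): for the second item in a sequence
// whose first item has a presentation duration of 1_000_000 us, a buffer timestamp T with stream
// offset O maps to composition time T - O + 1_000_000 us; for the first item, the clipping start
// position is subtracted and no preceding durations are added.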
super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
}
@Override
protected void onProcessedStreamChange() {
super.onProcessedStreamChange();
onMediaItemChanged();
}
@Override
protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
super.onPositionReset(positionUs, joining);
onMediaItemChanged();
}
// Other methods
private void onMediaItemChanged() {
EditedMediaItem currentEditedMediaItem = checkStateNotNull(pendingEditedMediaItem);
// Use reference equality intentionally.
boolean isLastInSequence =
currentEditedMediaItem
== Iterables.getLast(sequencePlayerRenderersWrapper.sequence.editedMediaItems);
audioSink.onMediaItemChanged(
currentEditedMediaItem, pendingOffsetToCompositionTimeUs, isLastInSequence);
}
}
private static final class SequenceVideoRenderer extends MediaCodecVideoRenderer {
private final SequencePlayerRenderersWrapper sequencePlayerRenderersWrapper;
private final VideoSink videoSink;
@Nullable private ImmutableList<Effect> pendingEffect;
public SequenceVideoRenderer(
Context context,
Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
SequencePlayerRenderersWrapper sequencePlayerRenderersWrapper) {
super(
context,
MediaCodecAdapter.Factory.getDefault(context),
MediaCodecSelector.DEFAULT,
DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS,
/* enableDecoderFallback= */ false,
eventHandler,
videoRendererEventListener,
MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY,
/* assumedMinimumCodecOperatingRate= */ DEFAULT_FRAME_RATE,
checkStateNotNull(sequencePlayerRenderersWrapper.compositingVideoSinkProvider));
this.sequencePlayerRenderersWrapper = sequencePlayerRenderersWrapper;
videoSink =
checkStateNotNull(sequencePlayerRenderersWrapper.compositingVideoSinkProvider).getSink();
}
@Override
protected void onStreamChanged(
Format[] formats,
long startPositionUs,
long offsetUs,
MediaSource.MediaPeriodId mediaPeriodId)
throws ExoPlaybackException {
checkState(getTimeline().getWindowCount() == 1);
super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
pendingEffect =
sequencePlayerRenderersWrapper.sequence.editedMediaItems.get(
getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid))
.effects
.videoEffects;
}
@Override
protected void onReadyToRegisterVideoSinkInputStream() {
@Nullable ImmutableList<Effect> pendingEffect = this.pendingEffect;
if (pendingEffect != null) {
videoSink.setPendingVideoEffects(pendingEffect);
this.pendingEffect = null;
}
}
}
private static final class SequenceImageRenderer extends ImageRenderer {
private final SequencePlayerRenderersWrapper sequencePlayerRenderersWrapper;
private final CompositingVideoSinkProvider compositingVideoSinkProvider;
private final VideoSink videoSink;
private final VideoFrameReleaseControl videoFrameReleaseControl;
private ImmutableList<Effect> videoEffects;
private @MonotonicNonNull ConstantRateTimestampIterator timestampIterator;
private boolean inputStreamPendingRegistration;
@Nullable private ExoPlaybackException pendingExoPlaybackException;
private long streamOffsetUs;
private boolean mayRenderStartOfStream;
public SequenceImageRenderer(SequencePlayerRenderersWrapper sequencePlayerRenderersWrapper) {
super(
checkStateNotNull(sequencePlayerRenderersWrapper.imageDecoderFactory), ImageOutput.NO_OP);
this.sequencePlayerRenderersWrapper = sequencePlayerRenderersWrapper;
compositingVideoSinkProvider =
checkStateNotNull(sequencePlayerRenderersWrapper.compositingVideoSinkProvider);
videoSink = compositingVideoSinkProvider.getSink();
videoFrameReleaseControl =
checkStateNotNull(compositingVideoSinkProvider.getVideoFrameReleaseControl());
videoEffects = ImmutableList.of();
streamOffsetUs = C.TIME_UNSET;
}
// ImageRenderer methods
@Override
protected void onEnabled(boolean joining, boolean mayRenderStartOfStream)
throws ExoPlaybackException {
super.onEnabled(joining, mayRenderStartOfStream);
this.mayRenderStartOfStream = mayRenderStartOfStream;
videoFrameReleaseControl.onEnabled(mayRenderStartOfStream);
if (joining) {
videoFrameReleaseControl.join(/* renderNextFrameImmediately= */ false);
}
if (!videoSink.isInitialized()) {
Format format = new Format.Builder().build();
try {
videoSink.initialize(format, getClock());
} catch (VideoSink.VideoSinkException e) {
throw createRendererException(
e, format, PlaybackException.ERROR_CODE_VIDEO_FRAME_PROCESSOR_INIT_FAILED);
}
}
// TODO - b/328444280: Do not set a listener on VideoSink, but MediaCodecVideoRenderer must
// unregister itself as a listener too.
videoSink.setListener(VideoSink.Listener.NO_OP, /* executor= */ (runnable) -> {});
}
@Override
protected void onDisabled() {
super.onDisabled();
videoFrameReleaseControl.onDisabled();
}
@Override
public boolean isReady() {
// If the renderer was enabled with mayRenderStartOfStream set to false, meaning the image
// renderer is playing after a video, we don't need to wait until the first frame is rendered.
// If the renderer was enabled with mayRenderStartOfStream set to true, we must wait until the
// first frame is rendered, which is checked by VideoSink.isReady().
return super.isReady() && (!mayRenderStartOfStream || videoSink.isReady());
}
@Override
protected void onReset() {
super.onReset();
pendingExoPlaybackException = null;
}
@Override
protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
videoSink.flush();
super.onPositionReset(positionUs, joining);
videoFrameReleaseControl.reset();
if (joining) {
videoFrameReleaseControl.join(/* renderNextFrameImmediately= */ false);
}
}
@Override
protected void onStarted() throws ExoPlaybackException {
super.onStarted();
videoFrameReleaseControl.onStarted();
}
@Override
protected void onStopped() {
super.onStopped();
videoFrameReleaseControl.onStopped();
}
@Override
protected void onStreamChanged(
Format[] formats,
long startPositionUs,
long offsetUs,
MediaSource.MediaPeriodId mediaPeriodId)
throws ExoPlaybackException {
checkState(getTimeline().getWindowCount() == 1);
super.onStreamChanged(formats, startPositionUs, offsetUs, mediaPeriodId);
streamOffsetUs = offsetUs;
EditedMediaItem editedMediaItem =
sequencePlayerRenderersWrapper.sequence.editedMediaItems.get(
getTimeline().getIndexOfPeriod(mediaPeriodId.periodUid));
videoEffects = editedMediaItem.effects.videoEffects;
timestampIterator =
new ConstantRateTimestampIterator(
editedMediaItem.getPresentationDurationUs(), /* frameRate= */ DEFAULT_FRAME_RATE);
inputStreamPendingRegistration = true;
}
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (pendingExoPlaybackException != null) {
ExoPlaybackException exoPlaybackException = pendingExoPlaybackException;
pendingExoPlaybackException = null;
throw exoPlaybackException;
}
super.render(positionUs, elapsedRealtimeUs);
compositingVideoSinkProvider.render(positionUs, elapsedRealtimeUs);
}
@Override
protected boolean processOutputBuffer(
long positionUs, long elapsedRealtimeUs, Bitmap outputImage, long timeUs) {
if (inputStreamPendingRegistration) {
checkState(streamOffsetUs != C.TIME_UNSET);
videoSink.setPendingVideoEffects(videoEffects);
videoSink.setStreamOffsetUs(streamOffsetUs);
videoSink.registerInputStream(
VideoSink.INPUT_TYPE_BITMAP,
new Format.Builder()
.setSampleMimeType(MimeTypes.IMAGE_RAW)
.setWidth(outputImage.getWidth())
.setHeight(outputImage.getHeight())
.setColorInfo(ColorInfo.SRGB_BT709_FULL)
.build());
videoFrameReleaseControl.setFrameRate(/* frameRate= */ DEFAULT_FRAME_RATE);
inputStreamPendingRegistration = false;
}
return videoSink.queueBitmap(outputImage, checkStateNotNull(timestampIterator));
}
}
}


@ -0,0 +1,326 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.transformer.TestUtil.ASSET_URI_PREFIX;
import static androidx.media3.transformer.TestUtil.FILE_AUDIO_RAW;
import static androidx.media3.transformer.TestUtil.FILE_AUDIO_RAW_STEREO_48000KHZ;
import static androidx.media3.transformer.TestUtil.createAudioEffects;
import static androidx.media3.transformer.TestUtil.createVolumeScalingAudioProcessor;
import android.content.Context;
import androidx.media3.common.MediaItem;
import androidx.media3.common.Player;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.DefaultAudioSink;
import androidx.media3.test.utils.CapturingAudioSink;
import androidx.media3.test.utils.DumpFileAsserts;
import androidx.media3.test.utils.FakeClock;
import androidx.media3.test.utils.robolectric.TestPlayerRunHelper;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Audio playback unit tests for {@link CompositionPlayer}.
*
* <p>These tests focus on audio because the video pipeline doesn't work in Robolectric.
*/
@RunWith(AndroidJUnit4.class)
public final class CompositionPlayerAudioPlaybackTest {
private final Context context = ApplicationProvider.getApplicationContext();
private CapturingAudioSink capturingAudioSink;
@Before
public void setUp() throws Exception {
capturingAudioSink = new CapturingAudioSink(new DefaultAudioSink.Builder(context).build());
}
@Test
public void playback_outputsCorrectSamples() throws Exception {
CompositionPlayer player = createCompositionPlayer(context, capturingAudioSink);
EditedMediaItem editedMediaItem1 =
new EditedMediaItem.Builder(MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW))
.setDurationUs(1_000_000L)
.build();
EditedMediaItem editedMediaItem2 =
new EditedMediaItem.Builder(
MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW_STEREO_48000KHZ))
.setDurationUs(348_000L)
.build();
EditedMediaItemSequence sequence =
new EditedMediaItemSequence(editedMediaItem1, editedMediaItem2);
Composition composition = new Composition.Builder(sequence).build();
player.setComposition(composition);
player.prepare();
player.play();
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
DumpFileAsserts.assertOutput(
context, capturingAudioSink, "audiosinkdumps/wav/sample.wav_then_sample_rf64.wav.dump");
}
@Test
public void playback_compositionWithEffects_outputsCorrectSamples() throws Exception {
CompositionPlayer player = createCompositionPlayer(context, capturingAudioSink);
EditedMediaItem editedMediaItem1 =
new EditedMediaItem.Builder(MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW))
.setDurationUs(1_000_000L)
.setEffects(createAudioEffects(createVolumeScalingAudioProcessor(0.5f)))
.build();
EditedMediaItem editedMediaItem2 =
new EditedMediaItem.Builder(
MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW_STEREO_48000KHZ))
.setDurationUs(348_000L)
.setEffects(createAudioEffects(createVolumeScalingAudioProcessor(2f)))
.build();
EditedMediaItemSequence sequence =
new EditedMediaItemSequence(editedMediaItem1, editedMediaItem2);
Composition composition = new Composition.Builder(sequence).build();
player.setComposition(composition);
player.prepare();
player.play();
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
DumpFileAsserts.assertOutput(
context,
capturingAudioSink,
"audiosinkdumps/wav/sample.wav-lowVolume_then_sample_rf64.wav-highVolume.dump");
}
@Test
public void playback_singleAudioItemWithEffects_outputsCorrectSamples() throws Exception {
CompositionPlayer player = createCompositionPlayer(context, capturingAudioSink);
EditedMediaItem audioEditedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW))
.setRemoveVideo(true)
.setDurationUs(1_000_000L)
.setEffects(createAudioEffects(createVolumeScalingAudioProcessor(2f)))
.build();
Composition composition =
new Composition.Builder(new EditedMediaItemSequence(audioEditedMediaItem)).build();
player.setComposition(composition);
player.prepare();
player.play();
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
DumpFileAsserts.assertOutput(
context, capturingAudioSink, "audiosinkdumps/" + FILE_AUDIO_RAW + "/highVolume.dump");
}
@Test
public void playback_singleAudioItemWithCompositionLevelEffects_outputsCorrectSamples()
throws Exception {
CompositionPlayer player = createCompositionPlayer(context, capturingAudioSink);
EditedMediaItem audioEditedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW))
.setRemoveVideo(true)
.setDurationUs(1_000_000L)
.build();
Composition composition =
new Composition.Builder(new EditedMediaItemSequence(audioEditedMediaItem))
.setEffects(createAudioEffects(createVolumeScalingAudioProcessor(2f)))
.build();
player.setComposition(composition);
player.prepare();
player.play();
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
DumpFileAsserts.assertOutput(
context, capturingAudioSink, "audiosinkdumps/" + FILE_AUDIO_RAW + "/highVolume.dump");
}
@Test
public void playback_compositionWithClipping_outputsCorrectSamples() throws Exception {
CompositionPlayer player = createCompositionPlayer(context, capturingAudioSink);
MediaItem mediaItem1 =
new MediaItem.Builder()
.setUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setStartPositionMs(300)
.setEndPositionMs(800)
.build())
.build();
EditedMediaItem editedMediaItem1 =
new EditedMediaItem.Builder(mediaItem1).setDurationUs(1_000_000L).build();
MediaItem mediaItem2 =
new MediaItem.Builder()
.setUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW_STEREO_48000KHZ)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setStartPositionMs(100)
.setEndPositionMs(300)
.build())
.build();
EditedMediaItem editedMediaItem2 =
new EditedMediaItem.Builder(mediaItem2).setDurationUs(348_000L).build();
Composition composition =
new Composition.Builder(new EditedMediaItemSequence(editedMediaItem1, editedMediaItem2))
.build();
player.setComposition(composition);
player.prepare();
player.play();
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
DumpFileAsserts.assertOutput(
context,
capturingAudioSink,
"audiosinkdumps/wav/sample.wav_clipped_then_sample_rf64_clipped.wav.dump");
}
@Test
public void seekTo_outputsCorrectSamples() throws Exception {
CompositionPlayer player = createCompositionPlayer(context, capturingAudioSink);
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW))
.setDurationUs(1_000_000L)
.build();
EditedMediaItemSequence sequence = new EditedMediaItemSequence(editedMediaItem);
Composition composition = new Composition.Builder(sequence).build();
player.setComposition(composition);
player.seekTo(/* positionMs= */ 500);
player.prepare();
player.play();
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
DumpFileAsserts.assertOutput(
context, capturingAudioSink, "audiosinkdumps/" + FILE_AUDIO_RAW + "/seek_to_500_ms.dump");
}
@Test
public void seekToNextMediaItem_outputsCorrectSamples() throws Exception {
CompositionPlayer player = createCompositionPlayer(context, capturingAudioSink);
EditedMediaItem editedMediaItem1 =
new EditedMediaItem.Builder(MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW))
.setDurationUs(1_000_000L)
.build();
EditedMediaItem editedMediaItem2 =
new EditedMediaItem.Builder(
MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW_STEREO_48000KHZ))
.setDurationUs(348_000L)
.build();
EditedMediaItemSequence sequence =
new EditedMediaItemSequence(editedMediaItem1, editedMediaItem2);
Composition composition = new Composition.Builder(sequence).build();
player.setComposition(composition);
player.seekTo(/* positionMs= */ 1200);
player.prepare();
player.play();
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
DumpFileAsserts.assertOutput(
context,
capturingAudioSink,
"audiosinkdumps/wav/sample.wav_then_sample_rf64.wav_seek_to_1200_ms.dump");
}
@Test
public void seekToPreviousMediaItem_outputsCorrectSamples() throws Exception {
CompositionPlayer player = createCompositionPlayer(context, capturingAudioSink);
EditedMediaItem editedMediaItem1 =
new EditedMediaItem.Builder(MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW))
.setDurationUs(1_000_000L)
.build();
EditedMediaItem editedMediaItem2 =
new EditedMediaItem.Builder(
MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW_STEREO_48000KHZ))
.setDurationUs(348_000L)
.build();
EditedMediaItemSequence sequence =
new EditedMediaItemSequence(editedMediaItem1, editedMediaItem2);
Composition composition = new Composition.Builder(sequence).build();
player.setComposition(composition);
player.seekTo(/* positionMs= */ 1200);
player.seekTo(/* positionMs= */ 500);
player.prepare();
player.play();
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
DumpFileAsserts.assertOutput(
context,
capturingAudioSink,
"audiosinkdumps/wav/sample.wav_then_sample_rf64.wav_seek_to_500_ms.dump");
}
@Test
public void seekTo_withClipping_outputsCorrectSamples() throws Exception {
CompositionPlayer player = createCompositionPlayer(context, capturingAudioSink);
MediaItem mediaItem1 =
new MediaItem.Builder()
.setUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setStartPositionMs(200)
.setEndPositionMs(900)
.build())
.build();
EditedMediaItem editedMediaItem1 =
new EditedMediaItem.Builder(mediaItem1).setDurationUs(1_000_000L).build();
MediaItem mediaItem2 =
new MediaItem.Builder()
.setUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW_STEREO_48000KHZ)
.setClippingConfiguration(
new MediaItem.ClippingConfiguration.Builder()
.setStartPositionMs(100)
.setEndPositionMs(300)
.build())
.build();
EditedMediaItem editedMediaItem2 =
new EditedMediaItem.Builder(mediaItem2).setDurationUs(348_000L).build();
EditedMediaItemSequence sequence =
new EditedMediaItemSequence(editedMediaItem1, editedMediaItem2);
Composition composition = new Composition.Builder(sequence).build();
player.setComposition(composition);
player.seekTo(/* positionMs= */ 800);
player.prepare();
player.play();
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
DumpFileAsserts.assertOutput(
context,
capturingAudioSink,
"audiosinkdumps/wav/sample.wav_then_sample_rf64.wav_clipped_seek_to_800_ms.dump");
}
private static CompositionPlayer createCompositionPlayer(Context context, AudioSink audioSink) {
return new CompositionPlayer.Builder(context)
.setClock(new FakeClock(/* isAutoAdvancing= */ true))
.setAudioSink(audioSink)
.build();
}
}


@ -0,0 +1,592 @@
/*
* Copyright 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static androidx.media3.transformer.TestUtil.ASSET_URI_PREFIX;
import static androidx.media3.transformer.TestUtil.FILE_AUDIO_RAW;
import static androidx.media3.transformer.TestUtil.FILE_AUDIO_RAW_STEREO_48000KHZ;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.view.Surface;
import android.view.TextureView;
import androidx.media3.common.C;
import androidx.media3.common.MediaItem;
import androidx.media3.common.Player;
import androidx.media3.common.Timeline;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.NullableType;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.DefaultAudioSink;
import androidx.media3.exoplayer.audio.ForwardingAudioSink;
import androidx.media3.test.utils.FakeClock;
import androidx.media3.test.utils.robolectric.RobolectricUtil;
import androidx.media3.test.utils.robolectric.TestPlayerRunHelper;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.Mockito;
/** Unit tests for {@link CompositionPlayer}. */
@RunWith(AndroidJUnit4.class)
public class CompositionPlayerTest {
private static final long TEST_TIMEOUT_MS = 1_000;
@Test
public void builder_buildCalledTwice_throws() {
CompositionPlayer.Builder builder =
new CompositionPlayer.Builder(ApplicationProvider.getApplicationContext());
CompositionPlayer player = builder.build();
assertThrows(IllegalStateException.class, builder::build);
player.release();
}
@Test
public void builder_buildCalledOnNonHandlerThread_throws() throws InterruptedException {
AtomicReference<@NullableType Exception> exception = new AtomicReference<>();
ConditionVariable conditionVariable = new ConditionVariable();
Thread thread =
new Thread(
() -> {
try {
new CompositionPlayer.Builder(ApplicationProvider.getApplicationContext()).build();
} catch (Exception e) {
exception.set(e);
} finally {
conditionVariable.open();
}
});
thread.start();
conditionVariable.block();
thread.join();
assertThat(exception.get()).isNotNull();
}
@Test
public void instance_accessedByWrongThread_throws() throws InterruptedException {
CompositionPlayer player = buildCompositionPlayer();
AtomicReference<@NullableType RuntimeException> exception = new AtomicReference<>();
ConditionVariable conditionVariable = new ConditionVariable();
HandlerThread handlerThread = new HandlerThread("test");
handlerThread.start();
new Handler(handlerThread.getLooper())
.post(
() -> {
try {
player.setComposition(buildComposition());
} catch (RuntimeException e) {
exception.set(e);
} finally {
conditionVariable.open();
}
});
conditionVariable.block();
player.release();
handlerThread.quit();
handlerThread.join();
assertThat(exception.get()).isInstanceOf(IllegalStateException.class);
assertThat(exception.get()).hasMessageThat().contains("Player is accessed on the wrong thread");
}
@Test
public void instance_withSpecifiedApplicationLooper_callbacksDispatchedOnSpecifiedThread()
throws Exception {
HandlerThread applicationHandlerThread = new HandlerThread("app-thread");
applicationHandlerThread.start();
Looper applicationLooper = applicationHandlerThread.getLooper();
Handler applicationThreadHandler = new Handler(applicationLooper);
AtomicReference<Thread> callbackThread = new AtomicReference<>();
ConditionVariable eventsArrived = new ConditionVariable();
CompositionPlayer player =
createCompositionPlayerBuilder().setLooper(applicationLooper).build();
// Listeners can be added by any thread.
player.addListener(
new Player.Listener() {
@Override
public void onEvents(Player player, Player.Events events) {
callbackThread.set(Thread.currentThread());
eventsArrived.open();
}
});
applicationThreadHandler.post(
() -> {
player.setComposition(buildComposition());
player.prepare();
});
if (!eventsArrived.block(TEST_TIMEOUT_MS)) {
throw new TimeoutException();
}
// Use a separate condition variable for releasing the player, to avoid race conditions with
// the condition variable used for the callback.
ConditionVariable released = new ConditionVariable();
applicationThreadHandler.post(
() -> {
player.release();
released.open();
});
if (!released.block(TEST_TIMEOUT_MS)) {
throw new TimeoutException();
}
applicationHandlerThread.quit();
applicationHandlerThread.join();
assertThat(eventsArrived.isOpen()).isTrue();
assertThat(callbackThread.get()).isEqualTo(applicationLooper.getThread());
}
@Test
public void release_onNewlyCreateInstance() {
CompositionPlayer player = buildCompositionPlayer();
player.release();
}
@Test
public void release_audioFailsDuringRelease_onlyLogsError() throws Exception {
Log.Logger logger = mock(Log.Logger.class);
Log.setLogger(logger);
AudioSink audioSink =
new ForwardingAudioSink(
new DefaultAudioSink.Builder(ApplicationProvider.getApplicationContext()).build()) {
@Override
public void release() {
throw new RuntimeException("AudioSink release error");
}
};
CompositionPlayer player = createCompositionPlayerBuilder().setAudioSink(audioSink).build();
Player.Listener listener = mock(Player.Listener.class);
player.addListener(listener);
player.setComposition(buildComposition());
player.prepare();
TestPlayerRunHelper.run(player).untilState(Player.STATE_READY);
player.release();
verify(listener, never()).onPlayerError(any());
verify(logger)
.e(
eq("CompPlayerInternal"),
eq("error while releasing the player"),
argThat(
throwable ->
throwable instanceof RuntimeException
&& throwable.getMessage().contains("AudioSink release error")));
}
@Test
public void getAvailableCommands_returnsSpecificCommands() {
CompositionPlayer player = buildCompositionPlayer();
assertThat(getList(player.getAvailableCommands()))
.containsExactly(
Player.COMMAND_PLAY_PAUSE,
Player.COMMAND_PREPARE,
Player.COMMAND_STOP,
Player.COMMAND_SEEK_IN_CURRENT_MEDIA_ITEM,
Player.COMMAND_SEEK_BACK,
Player.COMMAND_SEEK_FORWARD,
Player.COMMAND_GET_CURRENT_MEDIA_ITEM,
Player.COMMAND_GET_TIMELINE,
Player.COMMAND_SET_VIDEO_SURFACE,
Player.COMMAND_RELEASE);
player.release();
}
@Test
public void setComposition_calledTwice_throws() {
Composition composition = buildComposition();
CompositionPlayer player = buildCompositionPlayer();
player.setComposition(composition);
assertThrows(IllegalStateException.class, () -> player.setComposition(composition));
player.release();
}
@Test
public void setComposition_threeSequences_throws() {
CompositionPlayer player = buildCompositionPlayer();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.EMPTY).setDurationUs(1_000).build();
Composition composition =
new Composition.Builder(
ImmutableList.of(
new EditedMediaItemSequence(editedMediaItem),
new EditedMediaItemSequence(editedMediaItem),
new EditedMediaItemSequence(editedMediaItem)))
.build();
assertThrows(IllegalArgumentException.class, () -> player.setComposition(composition));
player.release();
}
@Test
public void setComposition_unmatchingDurations_throws() {
CompositionPlayer player = buildCompositionPlayer();
Composition composition =
new Composition.Builder(
ImmutableList.of(
new EditedMediaItemSequence(
new EditedMediaItem.Builder(MediaItem.EMPTY).setDurationUs(1).build()),
new EditedMediaItemSequence(
new EditedMediaItem.Builder(MediaItem.EMPTY).setDurationUs(2).build())))
.build();
assertThrows(IllegalArgumentException.class, () -> player.setComposition(composition));
player.release();
}
@Test
public void prepare_withoutCompositionSet_throws() {
CompositionPlayer player = buildCompositionPlayer();
assertThrows(IllegalStateException.class, player::prepare);
player.release();
}
@Test
public void playWhenReady_calledBeforePrepare_startsPlayingAfterPrepareCalled() throws Exception {
CompositionPlayer player = buildCompositionPlayer();
player.setPlayWhenReady(true);
player.setComposition(buildComposition());
player.prepare();
TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_ENDED);
player.release();
}
@Test
public void playWhenReady_triggersPlayWhenReadyCallbackWithReason() throws Exception {
CompositionPlayer player = buildCompositionPlayer();
AtomicInteger playWhenReadyReason = new AtomicInteger(-1);
player.addListener(
new Player.Listener() {
@Override
public void onPlayWhenReadyChanged(
boolean playWhenReady, @Player.PlayWhenReadyChangeReason int reason) {
playWhenReadyReason.set(reason);
}
});
player.setPlayWhenReady(true);
RobolectricUtil.runMainLooperUntil(() -> playWhenReadyReason.get() != -1);
assertThat(playWhenReadyReason.get())
.isEqualTo(Player.PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST);
}
@Test
public void setVideoTextureView_throws() {
Context context = ApplicationProvider.getApplicationContext();
CompositionPlayer player = buildCompositionPlayer();
assertThrows(
UnsupportedOperationException.class,
() -> player.setVideoTextureView(new TextureView(context)));
player.release();
}
@Test
public void setVideoSurface_withNonNullSurface_throws() {
CompositionPlayer player = buildCompositionPlayer();
Surface surface = new Surface(new SurfaceTexture(/* texName= */ 0));
assertThrows(UnsupportedOperationException.class, () -> player.setVideoSurface(surface));
player.release();
surface.release();
}
@Test
public void clearVideoSurface_specifiedSurfaceNotPreviouslySet_throws() {
CompositionPlayer player = buildCompositionPlayer();
assertThrows(
IllegalArgumentException.class,
() -> player.clearVideoSurface(new Surface(new SurfaceTexture(/* texName= */ 0))));
player.release();
}
@Test
public void getTotalBufferedDuration_playerStillIdle_returnsZero() {
CompositionPlayer player = buildCompositionPlayer();
assertThat(player.getTotalBufferedDuration()).isEqualTo(0);
player.release();
}
@Test
public void getTotalBufferedDuration_setCompositionButNotPrepare_returnsZero() {
CompositionPlayer player = buildCompositionPlayer();
player.setComposition(buildComposition());
assertThat(player.getTotalBufferedDuration()).isEqualTo(0);
player.release();
}
@Test
public void getTotalBufferedDuration_playerReady_returnsNonZero() throws Exception {
CompositionPlayer player = buildCompositionPlayer();
player.setComposition(buildComposition());
player.prepare();
TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_READY);
assertThat(player.getTotalBufferedDuration()).isGreaterThan(0);
player.release();
}
@Test
public void getDuration_withoutComposition_returnsTimeUnset() {
CompositionPlayer player = buildCompositionPlayer();
assertThat(player.getDuration()).isEqualTo(C.TIME_UNSET);
player.release();
}
@Test
public void getDuration_withComposition_returnsDuration() throws Exception {
CompositionPlayer player = buildCompositionPlayer();
Composition composition = buildComposition();
player.setComposition(composition);
player.prepare();
TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_READY);
// Refer to the durations in buildComposition().
assertThat(player.getDuration()).isEqualTo(1_348);
player.release();
}
@Test
public void addListener_callsSupportedCallbacks() throws Exception {
CompositionPlayer player = buildCompositionPlayer();
Composition composition = buildComposition();
List<Integer> playbackStates = new ArrayList<>();
AtomicBoolean playing = new AtomicBoolean();
Player.Listener listener =
spy(
new Player.Listener() {
@Override
public void onPlaybackStateChanged(int playbackState) {
if (playbackStates.isEmpty()
|| Iterables.getLast(playbackStates) != playbackState) {
playbackStates.add(playbackState);
}
}
@Override
public void onIsPlayingChanged(boolean isPlaying) {
playing.set(isPlaying);
}
});
InOrder inOrder = Mockito.inOrder(listener);
player.setComposition(composition);
player.addListener(listener);
player.prepare();
TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_READY);
inOrder
.verify(listener)
.onTimelineChanged(any(), eq(Player.TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED));
inOrder.verify(listener).onPlaybackStateChanged(Player.STATE_BUFFERING);
inOrder.verify(listener).onPlaybackStateChanged(Player.STATE_READY);
player.setPlayWhenReady(true);
// Ensure that Player.Listener.onIsPlayingChanged(true) is called.
RobolectricUtil.runMainLooperUntil(playing::get);
inOrder
.verify(listener)
.onPlayWhenReadyChanged(true, Player.PLAY_WHEN_READY_CHANGE_REASON_USER_REQUEST);
inOrder.verify(listener).onIsPlayingChanged(true);
TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_ENDED);
inOrder.verify(listener).onPlaybackStateChanged(Player.STATE_ENDED);
player.release();
TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_IDLE);
inOrder.verify(listener).onPlaybackStateChanged(Player.STATE_IDLE);
assertThat(playbackStates)
.containsExactly(
Player.STATE_BUFFERING, Player.STATE_READY, Player.STATE_ENDED, Player.STATE_IDLE)
.inOrder();
}
@Test
public void addListener_callsOnEventsWithSupportedEvents() throws Exception {
CompositionPlayer player = buildCompositionPlayer();
Composition composition = buildComposition();
Player.Listener mockListener = mock(Player.Listener.class);
ArgumentCaptor<Player.Events> eventsCaptor = ArgumentCaptor.forClass(Player.Events.class);
ImmutableSet<Integer> supportedEvents =
ImmutableSet.of(
Player.EVENT_TIMELINE_CHANGED,
Player.EVENT_MEDIA_ITEM_TRANSITION,
Player.EVENT_PLAYBACK_STATE_CHANGED,
Player.EVENT_PLAY_WHEN_READY_CHANGED,
Player.EVENT_IS_PLAYING_CHANGED);
player.setComposition(composition);
player.addListener(mockListener);
player.prepare();
player.play();
TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_ENDED);
player.release();
verify(mockListener, atLeastOnce()).onEvents(any(), eventsCaptor.capture());
List<Player.Events> eventsList = eventsCaptor.getAllValues();
for (Player.Events events : eventsList) {
assertThat(events.size()).isNotEqualTo(0);
      for (int i = 0; i < events.size(); i++) {
        assertThat(supportedEvents).contains(events.get(i));
      }
}
}
@Test
public void play_withCorrectTimelineUpdated() throws Exception {
CompositionPlayer player = buildCompositionPlayer();
Composition composition = buildComposition();
Player.Listener mockListener = mock(Player.Listener.class);
ArgumentCaptor<Timeline> timelineCaptor = ArgumentCaptor.forClass(Timeline.class);
ArgumentCaptor<Integer> timelineChangeReasonCaptor = ArgumentCaptor.forClass(Integer.class);
player.setComposition(composition);
player.addListener(mockListener);
player.prepare();
player.play();
TestPlayerRunHelper.runUntilPlaybackState(player, Player.STATE_ENDED);
player.release();
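    // Exactly one timeline update is expected, describing the full composition.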
verify(mockListener)
.onTimelineChanged(timelineCaptor.capture(), timelineChangeReasonCaptor.capture());
assertThat(timelineCaptor.getAllValues()).hasSize(1);
assertThat(timelineChangeReasonCaptor.getAllValues()).hasSize(1);
Timeline timeline = timelineCaptor.getValue();
assertThat(timeline.getWindowCount()).isEqualTo(1);
assertThat(timeline.getPeriodCount()).isEqualTo(1);
    // The window duration is the sum of the clip durations in buildComposition():
    // 1_000_000 us + 348_000 us = 1_348_000 us.
assertThat(timeline.getWindow(/* windowIndex= */ 0, new Timeline.Window()).durationUs)
.isEqualTo(1_348_000L);
assertThat(timelineChangeReasonCaptor.getValue())
.isEqualTo(Player.TIMELINE_CHANGE_REASON_PLAYLIST_CHANGED);
}
@Test
public void seekPastDuration_ends() throws Exception {
CompositionPlayer player = buildCompositionPlayer();
EditedMediaItem editedMediaItem =
new EditedMediaItem.Builder(MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW))
.setDurationUs(1_000_000L)
.build();
EditedMediaItemSequence sequence = new EditedMediaItemSequence(editedMediaItem);
Composition composition = new Composition.Builder(sequence).build();
player.setComposition(composition);
player.prepare();
player.play();
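    // The single clip lasts 1_000 ms, so seeking to 1_100 ms is past the end of the composition.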
player.seekTo(/* positionMs= */ 1100);
TestPlayerRunHelper.run(player).untilState(Player.STATE_ENDED);
player.release();
}
private static CompositionPlayer buildCompositionPlayer() {
return createCompositionPlayerBuilder().build();
}
private static CompositionPlayer.Builder createCompositionPlayerBuilder() {
return new CompositionPlayer.Builder(ApplicationProvider.getApplicationContext())
.setClock(new FakeClock(/* isAutoAdvancing= */ true));
}
private static Composition buildComposition() {
    // Use raw audio-only assets that can be played in Robolectric tests.
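    // The two clips (1_000_000 us and 348_000 us) give a total composition duration of 1_348_000 us.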
EditedMediaItem editedMediaItem1 =
new EditedMediaItem.Builder(MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW))
.setDurationUs(1_000_000L)
.build();
EditedMediaItem editedMediaItem2 =
new EditedMediaItem.Builder(
MediaItem.fromUri(ASSET_URI_PREFIX + FILE_AUDIO_RAW_STEREO_48000KHZ))
.setDurationUs(348_000L)
.build();
EditedMediaItemSequence sequence =
new EditedMediaItemSequence(editedMediaItem1, editedMediaItem2);
return new Composition.Builder(sequence).build();
}
private static List<Integer> getList(Player.Commands commands) {
List<Integer> commandList = new ArrayList<>();
for (int i = 0; i < commands.size(); i++) {
commandList.add(commands.get(i));
}
return commandList;
}
}

View File

@ -0,0 +1,194 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Mockito.atMostOnce;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;
/** Unit tests for {@link PreviewAudioPipeline}. */
@RunWith(AndroidJUnit4.class)
public class PreviewAudioPipelineTest {
@Rule public final MockitoRule mockito = MockitoJUnit.rule();
private PreviewAudioPipeline previewAudioPipeline;
@Mock AudioSink outputAudioSink;
@Before
public void setUp() {
previewAudioPipeline =
new PreviewAudioPipeline(
new DefaultAudioMixer.Factory(), /* effects= */ ImmutableList.of(), outputAudioSink);
}
@After
public void tearDown() {
previewAudioPipeline.release();
}
@Test
public void processData_noAudioSinksCreated_returnsFalse() throws Exception {
assertThat(previewAudioPipeline.processData()).isFalse();
}
@Test
  public void processData_audioSinkNotConfiguredYet_returnsFalse() throws Exception {
AudioGraphInputAudioSink unused = previewAudioPipeline.createInput();
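    // An input exists, but its sink has not been configured, so there is no data to process yet.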
assertThat(previewAudioPipeline.processData()).isFalse();
}
@Test
public void inputPlay_withOneInput_playsOutputSink() throws Exception {
AudioGraphInputAudioSink inputAudioSink = previewAudioPipeline.createInput();
inputAudioSink.play();
verify(outputAudioSink).play();
}
@Test
public void inputPause_withOneInput_pausesOutputSink() throws Exception {
AudioGraphInputAudioSink inputAudioSink = previewAudioPipeline.createInput();
inputAudioSink.play();
inputAudioSink.pause();
verify(outputAudioSink).pause();
}
@Test
public void inputReset_withOneInput_pausesOutputSink() {
AudioGraphInputAudioSink inputAudioSink = previewAudioPipeline.createInput();
inputAudioSink.play();
inputAudioSink.reset();
verify(outputAudioSink).pause();
}
@Test
public void inputPlay_whenPlaying_doesNotPlayOutputSink() throws Exception {
AudioGraphInputAudioSink inputAudioSink = previewAudioPipeline.createInput();
inputAudioSink.play();
inputAudioSink.play();
verify(outputAudioSink, atMostOnce()).play();
}
@Test
public void inputPause_whenNotPlaying_doesNotPauseOutputSink() throws Exception {
AudioGraphInputAudioSink inputAudioSink = previewAudioPipeline.createInput();
inputAudioSink.pause();
verify(outputAudioSink, never()).pause();
}
@Test
public void someInputPlay_withMultipleInputs_doesNotPlayOutputSink() throws Exception {
AudioGraphInputAudioSink inputAudioSink1 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink inputAudioSink2 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink unused = previewAudioPipeline.createInput();
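    // Only two of the three inputs start playing, so the shared output sink should not be played.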
inputAudioSink1.play();
inputAudioSink2.play();
verify(outputAudioSink, never()).play();
}
@Test
public void allInputPlay_withMultipleInputs_playsOutputSinkOnce() throws Exception {
AudioGraphInputAudioSink inputAudioSink1 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink inputAudioSink2 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink inputAudioSink3 = previewAudioPipeline.createInput();
inputAudioSink1.play();
inputAudioSink2.play();
inputAudioSink3.play();
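    // The output sink should be started at most once, even though all three inputs call play().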
verify(outputAudioSink, atMostOnce()).play();
}
@Test
public void firstInputPause_withMultipleInputs_pausesOutputSink() throws Exception {
InOrder inOrder = inOrder(outputAudioSink);
AudioGraphInputAudioSink inputAudioSink1 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink inputAudioSink2 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink inputAudioSink3 = previewAudioPipeline.createInput();
inputAudioSink1.play();
inputAudioSink2.play();
inputAudioSink3.play();
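    // Pausing any single input is expected to pause the shared output sink.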
inputAudioSink2.pause();
inOrder.verify(outputAudioSink).pause();
inOrder.verifyNoMoreInteractions();
}
@Test
public void allInputPause_withMultipleInputs_pausesOutputSinkOnce() throws Exception {
AudioGraphInputAudioSink inputAudioSink1 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink inputAudioSink2 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink inputAudioSink3 = previewAudioPipeline.createInput();
inputAudioSink1.play();
inputAudioSink2.play();
inputAudioSink3.play();
inputAudioSink2.pause();
inputAudioSink1.pause();
inputAudioSink3.pause();
verify(outputAudioSink, atMostOnce()).pause();
}
@Test
public void inputPlayAfterPause_withMultipleInputs_playsOutputSink() throws Exception {
InOrder inOrder = inOrder(outputAudioSink);
AudioGraphInputAudioSink inputAudioSink1 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink inputAudioSink2 = previewAudioPipeline.createInput();
AudioGraphInputAudioSink inputAudioSink3 = previewAudioPipeline.createInput();
inputAudioSink1.play();
inputAudioSink2.play();
inputAudioSink3.play();
inputAudioSink2.pause();
inputAudioSink1.pause();
inputAudioSink2.play();
inputAudioSink1.play();
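    // Expected output sink interactions: play (all inputs playing), pause (an input paused),
    // then play again once every paused input has resumed.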
inOrder.verify(outputAudioSink).play();
inOrder.verify(outputAudioSink).pause();
inOrder.verify(outputAudioSink).play();
Mockito.verifyNoMoreInteractions(outputAudioSink);
}
}