Publish transformer module

PiperOrigin-RevId: 353254249
kimvde 2021-01-22 17:12:59 +00:00 committed by kim-vde
parent 5b9fa7d7d9
commit abccbcf247
25 changed files with 3810 additions and 0 deletions

View File

@@ -139,6 +139,10 @@
* Remove `setVideoDecoderOutputBufferRenderer` from Player API. Use
`setVideoSurfaceView` and `clearVideoSurfaceView` instead.
* Replace `PlayerMessage.setHandler` with `PlayerMessage.setLooper`.
* Transformer:
* Add a library to transform media inputs. Available transformations are:
configuration of the output container format, removal of the audio or video
track, and slow motion flattening.
* Extractors:
* Populate codecs string for H.264/AVC in MP4, Matroska and FLV streams to
allow decoder capability checks based on codec profile/level

View File

@@ -28,6 +28,7 @@ include modulePrefix + 'library-dash'
include modulePrefix + 'library-extractor'
include modulePrefix + 'library-hls'
include modulePrefix + 'library-smoothstreaming'
include modulePrefix + 'library-transformer'
include modulePrefix + 'library-ui'
include modulePrefix + 'robolectricutils'
include modulePrefix + 'testutils'
@@ -56,6 +57,7 @@ project(modulePrefix + 'library-dash').projectDir = new File(rootDir, 'library/d
project(modulePrefix + 'library-extractor').projectDir = new File(rootDir, 'library/extractor')
project(modulePrefix + 'library-hls').projectDir = new File(rootDir, 'library/hls')
project(modulePrefix + 'library-smoothstreaming').projectDir = new File(rootDir, 'library/smoothstreaming')
project(modulePrefix + 'library-transformer').projectDir = new File(rootDir, 'library/transformer')
project(modulePrefix + 'library-ui').projectDir = new File(rootDir, 'library/ui')
project(modulePrefix + 'robolectricutils').projectDir = new File(rootDir, 'robolectricutils')
project(modulePrefix + 'testutils').projectDir = new File(rootDir, 'testutils')

View File

@@ -0,0 +1,10 @@
# ExoPlayer transformer library module #

Provides support for transforming media files.

## Links ##

* [Javadoc][]: Classes matching `com.google.android.exoplayer2.transformer.*`
  belong to this module.

[Javadoc]: https://exoplayer.dev/doc/reference/index.html
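
## Usage sketch ##

The snippet below is a minimal, illustrative example of driving the library. It assumes the
`Transformer.Builder` and `Transformer.Listener` added in this commit together with a
`startTransformation(MediaItem, String)` entry point; the input URI, output path and `context`
are placeholders.

```java
Transformer transformer =
    new Transformer.Builder()
        .setContext(context)
        .setRemoveAudio(true) // Example transformation: drop the audio track.
        .setListener(
            new Transformer.Listener() {
              @Override
              public void onTransformationCompleted(MediaItem inputMediaItem) {
                // The output file is ready to be used.
              }
            })
        .build();
// Throws IOException if the output file cannot be opened for writing.
transformer.startTransformation(
    MediaItem.fromUri("file:///path/to/input.mp4"), "/path/to/output.mp4");
```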

View File

@@ -0,0 +1,47 @@
// Copyright 2020 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
apply from: "$gradle.ext.exoplayerSettingsDir/common_library_config.gradle"
android {
buildTypes {
debug {
testCoverageEnabled = true
}
}
sourceSets.test.assets.srcDir '../../testdata/src/test/assets/'
}
dependencies {
implementation 'androidx.annotation:annotation:' + androidxAnnotationVersion
implementation project(modulePrefix + 'library-core')
compileOnly 'org.checkerframework:checker-qual:' + checkerframeworkVersion
compileOnly 'org.checkerframework:checker-compat-qual:' + checkerframeworkCompatVersion
compileOnly 'org.jetbrains.kotlin:kotlin-annotations-jvm:' + kotlinAnnotationsVersion
testImplementation project(modulePrefix + 'robolectricutils')
testImplementation project(modulePrefix + 'testutils')
testImplementation project(modulePrefix + 'testdata')
testImplementation 'org.robolectric:robolectric:' + robolectricVersion
}
ext {
javadocTitle = 'Transformer module'
}
apply from: '../../javadoc_library.gradle'
ext {
releaseArtifact = 'exoplayer-transformer'
releaseDescription = 'The ExoPlayer library transformer module.'
}
apply from: '../../publish.gradle'

View File

@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2020 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<manifest package="com.google.android.exoplayer2.transformer"/>

View File

@@ -0,0 +1,302 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.mediacodec.MediaCodecAdapter;
import com.google.android.exoplayer2.mediacodec.MediaFormatUtil;
import com.google.android.exoplayer2.mediacodec.SynchronousMediaCodecAdapter;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/**
* A wrapper around {@link MediaCodecAdapter}.
*
* <p>Provides a layer of abstraction for callers that need to interact with {@link MediaCodec}
* through {@link MediaCodecAdapter}. This is done by simplifying the calls needed to queue and
* dequeue buffers, removing the need to track buffer indices and codec events.
*/
/* package */ final class MediaCodecAdapterWrapper {
private final BufferInfo outputBufferInfo;
private final MediaCodecAdapter codec;
private final Format format;
@Nullable private ByteBuffer outputBuffer;
private int inputBufferIndex;
private int outputBufferIndex;
private boolean inputStreamEnded;
private boolean outputStreamEnded;
private boolean hasOutputFormat;
/**
* Returns a {@link MediaCodecAdapterWrapper} for a configured and started {@link
* MediaCodecAdapter} audio decoder.
*
* @param format The {@link Format} (of the input data) used to determine the underlying {@link
* MediaCodec} and its configuration values.
* @return A configured and started decoder wrapper.
* @throws IOException If the underlying codec cannot be created.
*/
@RequiresNonNull("#1.sampleMimeType")
public static MediaCodecAdapterWrapper createForAudioDecoding(Format format) throws IOException {
@Nullable MediaCodec decoder = null;
@Nullable MediaCodecAdapter adapter = null;
try {
decoder = MediaCodec.createDecoderByType(format.sampleMimeType);
MediaFormat mediaFormat =
MediaFormat.createAudioFormat(
format.sampleMimeType, format.sampleRate, format.channelCount);
MediaFormatUtil.setCsdBuffers(mediaFormat, format.initializationData);
adapter = new SynchronousMediaCodecAdapter.Factory().createAdapter(decoder);
adapter.configure(mediaFormat, /* surface= */ null, /* crypto= */ null, /* flags= */ 0);
adapter.start();
return new MediaCodecAdapterWrapper(adapter, format);
} catch (Exception e) {
if (adapter != null) {
adapter.release();
} else if (decoder != null) {
decoder.release();
}
throw e;
}
}
/**
* Returns a {@link MediaCodecAdapterWrapper} for a configured and started {@link
* MediaCodecAdapter} audio encoder.
*
* @param format The {@link Format} (of the output data) used to determine the underlying {@link
* MediaCodec} and its configuration values.
* @return A configured and started encoder wrapper.
* @throws IOException If the underlying codec cannot be created.
*/
@RequiresNonNull("#1.sampleMimeType")
public static MediaCodecAdapterWrapper createForAudioEncoding(Format format) throws IOException {
@Nullable MediaCodec encoder = null;
@Nullable MediaCodecAdapter adapter = null;
try {
encoder = MediaCodec.createEncoderByType(format.sampleMimeType);
MediaFormat mediaFormat =
MediaFormat.createAudioFormat(
format.sampleMimeType, format.sampleRate, format.channelCount);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, format.bitrate);
adapter = new SynchronousMediaCodecAdapter.Factory().createAdapter(encoder);
adapter.configure(
mediaFormat,
/* surface= */ null,
/* crypto= */ null,
/* flags= */ MediaCodec.CONFIGURE_FLAG_ENCODE);
adapter.start();
return new MediaCodecAdapterWrapper(adapter, format);
} catch (Exception e) {
if (adapter != null) {
adapter.release();
} else if (encoder != null) {
encoder.release();
}
throw e;
}
}
private MediaCodecAdapterWrapper(MediaCodecAdapter codec, Format format) {
this.codec = codec;
this.format = format;
outputBufferInfo = new BufferInfo();
inputBufferIndex = C.INDEX_UNSET;
outputBufferIndex = C.INDEX_UNSET;
}
/**
* Dequeues a writable input buffer, if available.
*
* @param inputBuffer The buffer where the dequeued buffer data is stored.
* @return Whether an input buffer is ready to be used.
*/
@EnsuresNonNullIf(expression = "#1.data", result = true)
public boolean maybeDequeueInputBuffer(DecoderInputBuffer inputBuffer) {
if (inputStreamEnded) {
return false;
}
if (inputBufferIndex < 0) {
inputBufferIndex = codec.dequeueInputBufferIndex();
if (inputBufferIndex < 0) {
return false;
}
inputBuffer.data = codec.getInputBuffer(inputBufferIndex);
inputBuffer.clear();
}
checkNotNull(inputBuffer.data);
return true;
}
/**
* Queues an input buffer.
*
* @param inputBuffer The buffer to be queued.
* @return Whether more input buffers can be queued.
*/
public boolean queueInputBuffer(DecoderInputBuffer inputBuffer) {
checkState(
!inputStreamEnded, "Input buffer cannot be queued after the input stream has ended.");
int offset = 0;
int size = 0;
if (inputBuffer.data != null && inputBuffer.data.hasRemaining()) {
offset = inputBuffer.data.position();
size = inputBuffer.data.remaining();
}
int flags = 0;
if (inputBuffer.isEndOfStream()) {
inputStreamEnded = true;
flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
}
codec.queueInputBuffer(inputBufferIndex, offset, size, inputBuffer.timeUs, flags);
inputBufferIndex = C.INDEX_UNSET;
inputBuffer.data = null;
return !inputStreamEnded;
}
/**
* Dequeues an output buffer, if available.
*
* <p>Once this method returns {@code true}, call {@link #getOutputBuffer()} to access the
* dequeued buffer.
*
* @return Whether an output buffer is available.
*/
public boolean maybeDequeueOutputBuffer() {
if (outputBufferIndex >= 0) {
return true;
}
if (outputStreamEnded) {
return false;
}
outputBufferIndex = codec.dequeueOutputBufferIndex(outputBufferInfo);
if (outputBufferIndex < 0) {
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED && !hasOutputFormat) {
hasOutputFormat = true;
}
return false;
}
if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
outputStreamEnded = true;
if (outputBufferInfo.size == 0) {
releaseOutputBuffer();
return false;
}
}
if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// Encountered a CSD buffer, skip it.
releaseOutputBuffer();
return false;
}
outputBuffer = checkNotNull(codec.getOutputBuffer(outputBufferIndex));
outputBuffer.position(outputBufferInfo.offset);
outputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size);
return true;
}
/**
* Returns a {@link Format} based on the {@link MediaCodecAdapter#getOutputFormat() mediaFormat},
* if available.
*/
@Nullable
public Format getOutputFormat() {
@Nullable MediaFormat mediaFormat = hasOutputFormat ? codec.getOutputFormat() : null;
if (mediaFormat == null) {
return null;
}
ImmutableList.Builder<byte[]> csdBuffers = new ImmutableList.Builder<>();
int csdIndex = 0;
while (true) {
@Nullable ByteBuffer csdByteBuffer = mediaFormat.getByteBuffer("csd-" + csdIndex);
if (csdByteBuffer == null) {
break;
}
byte[] csdBufferData = new byte[csdByteBuffer.remaining()];
csdByteBuffer.get(csdBufferData);
csdBuffers.add(csdBufferData);
csdIndex++;
}
return new Format.Builder()
.setSampleMimeType(mediaFormat.getString(MediaFormat.KEY_MIME))
.setChannelCount(mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT))
.setSampleRate(mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE))
.setInitializationData(csdBuffers.build())
.build();
}
/** Returns the {@link Format} used to create and configure the underlying {@link MediaCodec}. */
public Format getConfigFormat() {
return format;
}
/** Returns the current output {@link ByteBuffer}, if available. */
@Nullable
public ByteBuffer getOutputBuffer() {
return outputBuffer;
}
/** Returns the {@link BufferInfo} associated with the current output buffer, if available. */
@Nullable
public BufferInfo getOutputBufferInfo() {
return outputBuffer == null ? null : outputBufferInfo;
}
/**
* Releases the current output buffer.
*
* <p>This should be called after the buffer has been processed. The next output buffer will not
* be available until the previous has been released.
*/
public void releaseOutputBuffer() {
outputBuffer = null;
codec.releaseOutputBuffer(outputBufferIndex, /* render= */ false);
outputBufferIndex = C.INDEX_UNSET;
}
/** Returns whether the codec output stream has ended, and no more data can be dequeued. */
public boolean isEnded() {
return outputStreamEnded && outputBufferIndex == C.INDEX_UNSET;
}
/** Releases the underlying codec. */
public void release() {
outputBuffer = null;
codec.release();
}
}
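
The loop below is a hedged sketch of how a caller might drive this wrapper for audio decoding. It is illustrative only and not part of the commit: error handling is omitted, and readSampleInto(...) and consume(...) are hypothetical helpers standing in for the caller's input and output plumbing.

// Illustrative only. Assumes audioFormat describes the audio track to decode.
MediaCodecAdapterWrapper decoder = MediaCodecAdapterWrapper.createForAudioDecoding(audioFormat);
DecoderInputBuffer inputBuffer =
    new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
while (!decoder.isEnded()) {
  // Feed the codec while it has input buffers available; maybeDequeueInputBuffer returns false
  // once the input stream has ended.
  while (decoder.maybeDequeueInputBuffer(inputBuffer)) {
    readSampleInto(inputBuffer); // Hypothetical: fills data/timeUs or sets the end-of-stream flag.
    decoder.queueInputBuffer(inputBuffer);
  }
  // Drain the output that is currently available.
  while (decoder.maybeDequeueOutputBuffer()) {
    consume(decoder.getOutputBuffer(), decoder.getOutputBufferInfo()); // Hypothetical consumer.
    decoder.releaseOutputBuffer();
  }
}
decoder.release();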

View File

@@ -0,0 +1,358 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Util.SDK_INT;
import static com.google.android.exoplayer2.util.Util.castNonNull;
import static com.google.android.exoplayer2.util.Util.minValue;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.ParcelFileDescriptor;
import android.util.SparseIntArray;
import android.util.SparseLongArray;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.mediacodec.MediaFormatUtil;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
/**
* A wrapper around a media muxer.
*
* <p>This wrapper can contain at most one video track and one audio track.
*/
@RequiresApi(18)
/* package */ final class MuxerWrapper {
/**
* The maximum difference between the track positions, in microseconds.
*
* <p>The value of this constant has been chosen based on the interleaving observed in a few media
* files, where continuous chunks of the same track were about 0.5 seconds long.
*/
private static final long MAX_TRACK_WRITE_AHEAD_US = C.msToUs(500);
private final MediaMuxer mediaMuxer;
private final String outputMimeType;
private final SparseIntArray trackTypeToIndex;
private final SparseLongArray trackTypeToTimeUs;
private final MediaCodec.BufferInfo bufferInfo;
private int trackCount;
private int trackFormatCount;
private boolean isReady;
private int previousTrackType;
private long minTrackTimeUs;
/**
* Constructs an instance.
*
* @param path The path to the output file.
* @param outputMimeType The {@link MimeTypes MIME type} of the output.
* @throws IllegalArgumentException If the path is invalid or the MIME type is not supported.
* @throws IOException If an error occurs opening the output file for writing.
*/
public MuxerWrapper(String path, String outputMimeType) throws IOException {
this(new MediaMuxer(path, mimeTypeToMuxerOutputFormat(outputMimeType)), outputMimeType);
}
/**
* Constructs an instance.
*
* @param parcelFileDescriptor A readable and writable {@link ParcelFileDescriptor} of the output.
* The file referenced by this ParcelFileDescriptor should not be used before the muxer is
* released. It is the responsibility of the caller to close the ParcelFileDescriptor. This
* can be done after this constructor returns.
* @param outputMimeType The {@link MimeTypes MIME type} of the output.
* @throws IllegalArgumentException If the file descriptor is invalid or the MIME type is not
* supported.
* @throws IOException If an error occurs opening the output file for writing.
*/
@RequiresApi(26)
public MuxerWrapper(ParcelFileDescriptor parcelFileDescriptor, String outputMimeType)
throws IOException {
this(
new MediaMuxer(
parcelFileDescriptor.getFileDescriptor(), mimeTypeToMuxerOutputFormat(outputMimeType)),
outputMimeType);
}
private MuxerWrapper(MediaMuxer mediaMuxer, String outputMimeType) {
this.mediaMuxer = mediaMuxer;
this.outputMimeType = outputMimeType;
trackTypeToIndex = new SparseIntArray();
trackTypeToTimeUs = new SparseLongArray();
bufferInfo = new MediaCodec.BufferInfo();
previousTrackType = C.TRACK_TYPE_NONE;
}
/**
* Registers an output track.
*
* <p>All tracks must be registered before any track format is {@link #addTrackFormat(Format)
* added}.
*
* @throws IllegalStateException If a track format was {@link #addTrackFormat(Format) added}
* before calling this method.
*/
public void registerTrack() {
checkState(
trackFormatCount == 0, "Tracks cannot be registered after track formats have been added.");
trackCount++;
}
/**
* Adds a track format to the muxer.
*
* <p>The tracks must all be {@link #registerTrack() registered} before any format is added and
* all the formats must be added before samples are {@link #writeSample(int, ByteBuffer, boolean,
* long) written}.
*
* @param format The {@link Format} to be added.
* @throws IllegalArgumentException If the format is invalid.
* @throws IllegalStateException If the format is unsupported, if there is already a track format
* of the same type (audio or video) or if the muxer is in the wrong state.
*/
public void addTrackFormat(Format format) {
checkState(trackCount > 0, "All tracks should be registered before the formats are added.");
checkState(trackFormatCount < trackCount, "All track formats have already been added.");
@Nullable String sampleMimeType = format.sampleMimeType;
boolean isAudio = MimeTypes.isAudio(sampleMimeType);
boolean isVideo = MimeTypes.isVideo(sampleMimeType);
checkState(isAudio || isVideo, "Unsupported track format: " + sampleMimeType);
int trackType = MimeTypes.getTrackType(sampleMimeType);
checkState(
trackTypeToIndex.get(trackType, /* valueIfKeyNotFound= */ C.INDEX_UNSET) == C.INDEX_UNSET,
"There is already a track of type " + trackType);
MediaFormat mediaFormat;
if (isAudio) {
mediaFormat =
MediaFormat.createAudioFormat(
castNonNull(sampleMimeType), format.sampleRate, format.channelCount);
} else {
mediaFormat =
MediaFormat.createVideoFormat(castNonNull(sampleMimeType), format.width, format.height);
mediaMuxer.setOrientationHint(format.rotationDegrees);
}
MediaFormatUtil.setCsdBuffers(mediaFormat, format.initializationData);
int trackIndex = mediaMuxer.addTrack(mediaFormat);
trackTypeToIndex.put(trackType, trackIndex);
trackTypeToTimeUs.put(trackType, 0L);
trackFormatCount++;
if (trackFormatCount == trackCount) {
mediaMuxer.start();
isReady = true;
}
}
/**
* Attempts to write a sample to the muxer.
*
* @param trackType The track type of the sample, defined by the {@code TRACK_TYPE_*} constants in
* {@link C}.
* @param data The sample to write, or {@code null} if the sample is empty.
* @param isKeyFrame Whether the sample is a key frame.
* @param presentationTimeUs The presentation time of the sample in microseconds.
* @return Whether the sample was successfully written. This is {@code false} if the muxer hasn't
* {@link #addTrackFormat(Format) received a format} for every {@link #registerTrack()
* registered track}, or if it should write samples of other track types first to ensure a
* good interleaving.
* @throws IllegalArgumentException If the sample in {@code buffer} is invalid.
* @throws IllegalStateException If the muxer doesn't have any {@link #endTrack(int) non-ended}
* track of the given track type or if the muxer is in the wrong state.
*/
public boolean writeSample(
int trackType, @Nullable ByteBuffer data, boolean isKeyFrame, long presentationTimeUs) {
int trackIndex = trackTypeToIndex.get(trackType, /* valueIfKeyNotFound= */ C.INDEX_UNSET);
checkState(
trackIndex != C.INDEX_UNSET,
"Could not write sample because there is no track of type " + trackType);
if (!canWriteSampleOfType(trackType)) {
return false;
} else if (data == null) {
return true;
}
int offset = data.position();
int size = data.limit() - offset;
int flags = isKeyFrame ? C.BUFFER_FLAG_KEY_FRAME : 0;
bufferInfo.set(offset, size, presentationTimeUs, flags);
mediaMuxer.writeSampleData(trackIndex, data, bufferInfo);
trackTypeToTimeUs.put(trackType, presentationTimeUs);
previousTrackType = trackType;
return true;
}
/**
* Notifies the muxer that all the samples have been {@link #writeSample(int, ByteBuffer, boolean,
* long) written} for a given track.
*
* @param trackType The track type, defined by the {@code TRACK_TYPE_*} constants in {@link C}.
*/
public void endTrack(int trackType) {
trackTypeToIndex.delete(trackType);
trackTypeToTimeUs.delete(trackType);
}
/**
* Stops the muxer.
*
* <p>The muxer cannot be used anymore once it is stopped.
*
* @throws IllegalStateException If the muxer is in the wrong state (for example if it didn't
* receive any samples).
*/
public void stop() {
if (!isReady) {
return;
}
isReady = false;
try {
mediaMuxer.stop();
} catch (IllegalStateException e) {
if (SDK_INT < 30) {
// Set the muxer state to stopped even if mediaMuxer.stop() failed so that
// mediaMuxer.release() doesn't attempt to stop the muxer and therefore doesn't throw the
// same exception without releasing its resources. This is already implemented in MediaMuxer
// from API level 30.
try {
Field muxerStoppedStateField = MediaMuxer.class.getDeclaredField("MUXER_STATE_STOPPED");
muxerStoppedStateField.setAccessible(true);
int muxerStoppedState = castNonNull((Integer) muxerStoppedStateField.get(mediaMuxer));
Field muxerStateField = MediaMuxer.class.getDeclaredField("mState");
muxerStateField.setAccessible(true);
muxerStateField.set(mediaMuxer, muxerStoppedState);
} catch (Exception reflectionException) {
// Do nothing.
}
}
throw e;
}
}
/**
* Releases the muxer.
*
* <p>The muxer cannot be used anymore once it is released.
*/
public void release() {
isReady = false;
mediaMuxer.release();
}
/** Returns the number of {@link #registerTrack() registered} tracks. */
public int getTrackCount() {
return trackCount;
}
/**
* Returns whether the sample {@link MimeTypes MIME type} is supported.
*
* <p>Supported sample formats are documented in {@link MediaMuxer#addTrack(MediaFormat)}.
*/
public boolean supportsSampleMimeType(@Nullable String mimeType) {
boolean isAudio = MimeTypes.isAudio(mimeType);
boolean isVideo = MimeTypes.isVideo(mimeType);
if (outputMimeType.equals(MimeTypes.VIDEO_MP4)) {
if (isVideo) {
return MimeTypes.VIDEO_H263.equals(mimeType)
|| MimeTypes.VIDEO_H264.equals(mimeType)
|| MimeTypes.VIDEO_MP4V.equals(mimeType)
|| (Util.SDK_INT >= 24 && MimeTypes.VIDEO_H265.equals(mimeType));
} else if (isAudio) {
return MimeTypes.AUDIO_AAC.equals(mimeType)
|| MimeTypes.AUDIO_AMR_NB.equals(mimeType)
|| MimeTypes.AUDIO_AMR_WB.equals(mimeType);
}
} else if (outputMimeType.equals(MimeTypes.VIDEO_WEBM) && SDK_INT >= 21) {
if (isVideo) {
return MimeTypes.VIDEO_VP8.equals(mimeType)
|| (Util.SDK_INT >= 24 && MimeTypes.VIDEO_VP9.equals(mimeType));
} else if (isAudio) {
return MimeTypes.AUDIO_VORBIS.equals(mimeType);
}
}
return false;
}
/**
* Returns whether the {@link MimeTypes MIME type} provided is a supported muxer output format.
*/
public static boolean supportsOutputMimeType(String mimeType) {
try {
mimeTypeToMuxerOutputFormat(mimeType);
} catch (IllegalArgumentException e) {
return false;
}
return true;
}
/**
* Returns whether the muxer can write a sample of the given track type.
*
* @param trackType The track type, defined by the {@code TRACK_TYPE_*} constants in {@link C}.
* @return Whether the muxer can write a sample of the given track type. This is {@code false} if
* the muxer hasn't {@link #addTrackFormat(Format) received a format} for every {@link
* #registerTrack() registered track}, or if it should write samples of other track types
* first to ensure a good interleaving.
* @throws IllegalStateException If the muxer doesn't have any {@link #endTrack(int) non-ended}
* track of the given track type.
*/
private boolean canWriteSampleOfType(int trackType) {
long trackTimeUs = trackTypeToTimeUs.get(trackType, /* valueIfKeyNotFound= */ C.TIME_UNSET);
checkState(trackTimeUs != C.TIME_UNSET);
if (!isReady) {
return false;
}
if (trackTypeToTimeUs.size() == 1) {
return true;
}
if (trackType != previousTrackType) {
minTrackTimeUs = minValue(trackTypeToTimeUs);
}
return trackTimeUs - minTrackTimeUs <= MAX_TRACK_WRITE_AHEAD_US;
}
/**
* Converts a {@link MimeTypes MIME type} into a {@link MediaMuxer.OutputFormat MediaMuxer output
* format}.
*
* @param mimeType The {@link MimeTypes MIME type} to convert.
* @return The corresponding {@link MediaMuxer.OutputFormat MediaMuxer output format}.
* @throws IllegalArgumentException If the {@link MimeTypes MIME type} is not supported as output
* format.
*/
private static int mimeTypeToMuxerOutputFormat(String mimeType) {
if (mimeType.equals(MimeTypes.VIDEO_MP4)) {
return MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
} else if (SDK_INT >= 21 && mimeType.equals(MimeTypes.VIDEO_WEBM)) {
return MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM;
} else {
throw new IllegalArgumentException("Unsupported output MIME type: " + mimeType);
}
}
}
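
A hedged sketch of the call sequence this wrapper expects for an output containing one video and one audio track. It is illustrative only: videoFormat, audioFormat, the sample buffers and timestamps are assumed to come from the caller (for example from encoders), hasMoreSamples(), advanceVideo() and advanceAudio() are hypothetical helpers, and error handling is omitted.

MuxerWrapper muxer = new MuxerWrapper("/path/to/output.mp4", MimeTypes.VIDEO_MP4);
muxer.registerTrack(); // Video.
muxer.registerTrack(); // Audio.
// Formats can only be added once all tracks are registered; the underlying MediaMuxer starts
// when the last registered track has received its format.
muxer.addTrackFormat(videoFormat);
muxer.addTrackFormat(audioFormat);
while (hasMoreSamples()) {
  // A false return value means the other track should be fed first to keep the interleaving
  // within MAX_TRACK_WRITE_AHEAD_US, so the caller simply retries the same sample later.
  if (muxer.writeSample(C.TRACK_TYPE_VIDEO, videoData, videoIsKeyFrame, videoTimeUs)) {
    advanceVideo();
  }
  if (muxer.writeSample(C.TRACK_TYPE_AUDIO, audioData, /* isKeyFrame= */ true, audioTimeUs)) {
    advanceAudio();
  }
}
muxer.endTrack(C.TRACK_TYPE_VIDEO);
muxer.endTrack(C.TRACK_TYPE_AUDIO);
muxer.stop();
muxer.release();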

View File

@@ -0,0 +1,27 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import androidx.annotation.IntRange;
/** Holds a progress percentage. */
public final class ProgressHolder {
/** The held progress, expressed as an integer percentage. */
@IntRange(from = 0, to = 100)
public int progress;
}
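
A short illustrative snippet showing how this holder is intended to be used, assuming a Transformer#getProgress(ProgressHolder) query (defined in the Transformer class added by this commit) that writes the current progress into the holder and returns a progress state:

ProgressHolder progressHolder = new ProgressHolder();
int progressState = transformer.getProgress(progressHolder); // Assumed query on Transformer.
if (isProgressAvailable(progressState)) { // Hypothetical check on the returned state.
  showPercentage(progressHolder.progress); // Hypothetical UI update; progress is between 0 and 100.
}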

View File

@@ -0,0 +1,31 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
/** A sample transformer for a given track. */
/* package */ interface SampleTransformer {
/**
* Transforms the data and metadata of the sample contained in {@code buffer}.
*
* @param buffer The sample to transform. If the sample {@link DecoderInputBuffer#data data} is
* {@code null} after the execution of this method, the sample must be discarded.
*/
void transformSample(DecoderInputBuffer buffer);
}
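
A minimal illustrative implementation (hypothetical, not part of this commit) that discards every sample before a given start time, following the contract described above:

/* package */ final class SkipToTimeSampleTransformer implements SampleTransformer {

  private final long startTimeUs;

  public SkipToTimeSampleTransformer(long startTimeUs) {
    this.startTimeUs = startTimeUs;
  }

  @Override
  public void transformSample(DecoderInputBuffer buffer) {
    if (buffer.timeUs < startTimeUs) {
      buffer.data = null; // Signals that this sample must be discarded.
    }
  }
}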

View File

@@ -0,0 +1,397 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.NalUnitUtil.NAL_START_CODE;
import static com.google.android.exoplayer2.util.Util.castNonNull;
import static java.lang.Math.min;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.mp4.SlowMotionData;
import com.google.android.exoplayer2.metadata.mp4.SmtaMetadataEntry;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.common.collect.ImmutableList;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/**
* {@link SampleTransformer} that flattens SEF slow motion video samples.
*
* <p>Such samples follow the ITU-T Recommendation H.264 with temporal SVC.
*
* <p>This transformer leaves the samples received unchanged if the input is not an SEF slow motion
* video.
*
* <p>The mathematical formulas used in this class are explained in [Internal ref:
* http://go/exoplayer-sef-slomo-video-flattening].
*/
/* package */ final class SefSlowMotionVideoSampleTransformer implements SampleTransformer {
/**
* The frame rate of SEF slow motion videos, in fps.
*
* <p>This frame rate is constant and is not equal to the capture frame rate. It is set to a lower
* value so that the video is entirely played in slow motion on players that do not support SEF
* slow motion.
*/
@VisibleForTesting /* package */ static final int INPUT_FRAME_RATE = 30;
/**
* The target frame rate of the flattened output, in fps.
*
* <p>The output frame rate might be slightly different and might not be constant.
*/
private static final int TARGET_OUTPUT_FRAME_RATE = 30;
private static final int NAL_START_CODE_LENGTH = NAL_START_CODE.length;
/**
* The nal_unit_type corresponding to a prefix NAL unit (see ITU-T Recommendation H.264 (2016)
* table 7-1).
*/
private static final int NAL_UNIT_TYPE_PREFIX = 0x0E;
private final byte[] scratch;
/** The SEF slow motion configuration of the input. */
@Nullable private final SlowMotionData slowMotionData;
/**
* An iterator iterating over the slow motion segments, pointing at the segment following {@code
* nextSegmentInfo}, if any.
*/
private final Iterator<SlowMotionData.Segment> segmentIterator;
/** The frame rate at which the input has been captured, in fps. */
private final float captureFrameRate;
/** The maximum SVC temporal layer present in the input. */
private final int inputMaxLayer;
/**
* The maximum SVC temporal layer value of the frames that should be kept in the input (or a part
* of it) so that it is played at normal speed.
*/
private final int normalSpeedMaxLayer;
/**
* The {@link SegmentInfo} describing the current slow motion segment, or null if the current
* frame is not in such a segment.
*/
@Nullable private SegmentInfo currentSegmentInfo;
/**
* The {@link SegmentInfo} describing the slow motion segment following (not including) the
* current frame, or null if there is no such segment.
*/
@Nullable private SegmentInfo nextSegmentInfo;
/**
* The time delta to be added to the output timestamps before scaling to take the slow motion
* segments into account, in microseconds.
*/
private long frameTimeDeltaUs;
public SefSlowMotionVideoSampleTransformer(Format format) {
scratch = new byte[NAL_START_CODE_LENGTH];
MetadataInfo metadataInfo = getMetadataInfo(format.metadata);
slowMotionData = metadataInfo.slowMotionData;
List<SlowMotionData.Segment> segments =
slowMotionData != null ? slowMotionData.segments : ImmutableList.of();
segmentIterator = segments.iterator();
captureFrameRate = metadataInfo.captureFrameRate;
inputMaxLayer = metadataInfo.inputMaxLayer;
normalSpeedMaxLayer = metadataInfo.normalSpeedMaxLayer;
nextSegmentInfo =
segmentIterator.hasNext()
? new SegmentInfo(segmentIterator.next(), inputMaxLayer, normalSpeedMaxLayer)
: null;
if (slowMotionData != null) {
checkArgument(
MimeTypes.VIDEO_H264.equals(format.sampleMimeType),
"Unsupported MIME type for SEF slow motion video track: " + format.sampleMimeType);
}
}
@Override
public void transformSample(DecoderInputBuffer buffer) {
if (slowMotionData == null) {
// The input is not an SEF slow motion video.
return;
}
ByteBuffer data = castNonNull(buffer.data);
int originalPosition = data.position();
data.position(originalPosition + NAL_START_CODE_LENGTH);
data.get(scratch, 0, 4); // Read nal_unit_header_svc_extension.
int nalUnitType = scratch[0] & 0x1F;
boolean svcExtensionFlag = ((scratch[1] & 0xFF) >> 7) == 1;
checkState(
nalUnitType == NAL_UNIT_TYPE_PREFIX && svcExtensionFlag,
"Missing SVC extension prefix NAL unit.");
int layer = (scratch[3] & 0xFF) >> 5;
boolean shouldKeepFrame = processCurrentFrame(layer, buffer.timeUs);
if (shouldKeepFrame) {
buffer.timeUs = getCurrentFrameOutputTimeUs(/* inputTimeUs= */ buffer.timeUs);
skipToNextNalUnit(data); // Skip over prefix_nal_unit_svc.
} else {
buffer.data = null;
}
}
/**
* Processes the current frame and returns whether it should be kept.
*
* @param layer The frame temporal SVC layer.
* @param timeUs The frame presentation time, in microseconds.
* @return Whether to keep the current frame.
*/
@VisibleForTesting
/* package */ boolean processCurrentFrame(int layer, long timeUs) {
// Skip segments in the unlikely case that they do not contain any frame start time.
while (nextSegmentInfo != null && timeUs >= nextSegmentInfo.endTimeUs) {
enterNextSegment();
}
if (nextSegmentInfo != null && timeUs >= nextSegmentInfo.startTimeUs) {
enterNextSegment();
} else if (currentSegmentInfo != null && timeUs >= currentSegmentInfo.endTimeUs) {
leaveCurrentSegment();
}
int maxLayer = currentSegmentInfo != null ? currentSegmentInfo.maxLayer : normalSpeedMaxLayer;
return layer <= maxLayer || shouldKeepFrameForOutputValidity(layer, timeUs);
}
/** Updates the segments information so that the next segment becomes the current segment. */
private void enterNextSegment() {
if (currentSegmentInfo != null) {
leaveCurrentSegment();
}
currentSegmentInfo = nextSegmentInfo;
nextSegmentInfo =
segmentIterator.hasNext()
? new SegmentInfo(segmentIterator.next(), inputMaxLayer, normalSpeedMaxLayer)
: null;
}
/**
* Updates the segments information so that there is no current segment. The next segment is
* unchanged.
*/
@RequiresNonNull("currentSegmentInfo")
private void leaveCurrentSegment() {
frameTimeDeltaUs +=
(currentSegmentInfo.endTimeUs - currentSegmentInfo.startTimeUs)
* (currentSegmentInfo.speedDivisor - 1);
currentSegmentInfo = null;
}
/**
* Returns whether the frames of the next segment are based on the current frame. In this case,
* the current frame should be kept in order for the output to be valid.
*
* @param layer The frame temporal SVC layer.
* @param timeUs The frame presentation time, in microseconds.
* @return Whether to keep the current frame.
*/
private boolean shouldKeepFrameForOutputValidity(int layer, long timeUs) {
if (nextSegmentInfo == null || layer >= nextSegmentInfo.maxLayer) {
return false;
}
long frameOffsetToSegmentEstimate =
(nextSegmentInfo.startTimeUs - timeUs) * INPUT_FRAME_RATE / C.MICROS_PER_SECOND;
float allowedError = 0.45f;
float baseMaxFrameOffsetToSegment =
-(1 << (inputMaxLayer - nextSegmentInfo.maxLayer)) + allowedError;
for (int i = 1; i < nextSegmentInfo.maxLayer; i++) {
if (frameOffsetToSegmentEstimate < (1 << (inputMaxLayer - i)) + baseMaxFrameOffsetToSegment) {
if (layer <= i) {
return true;
}
} else {
return false;
}
}
return false;
}
/**
* Returns the time of the current frame in the output, in microseconds.
*
* <p>This time is computed so that segments start and end at the correct times. As a result, the
* output frame rate might be variable.
*
* <p>This method can only be called if all the frames until the current one (included) have been
* {@link #processCurrentFrame(int, long) processed} in order, and if the next frames have not
* been processed yet.
*/
@VisibleForTesting
/* package */ long getCurrentFrameOutputTimeUs(long inputTimeUs) {
long outputTimeUs = inputTimeUs + frameTimeDeltaUs;
if (currentSegmentInfo != null) {
outputTimeUs +=
(inputTimeUs - currentSegmentInfo.startTimeUs) * (currentSegmentInfo.speedDivisor - 1);
}
return Math.round(outputTimeUs * INPUT_FRAME_RATE / captureFrameRate);
}
/**
* Advances the position of {@code data} to the start of the next NAL unit.
*
* @throws IllegalStateException If no NAL unit is found.
*/
private void skipToNextNalUnit(ByteBuffer data) {
int newPosition = data.position();
while (data.remaining() >= NAL_START_CODE_LENGTH) {
data.get(scratch, 0, NAL_START_CODE_LENGTH);
if (Arrays.equals(scratch, NAL_START_CODE)) {
data.position(newPosition);
return;
}
newPosition++;
data.position(newPosition);
}
throw new IllegalStateException("Could not find NAL unit start code.");
}
/** Returns the {@link MetadataInfo} derived from the {@link Metadata} provided. */
private static MetadataInfo getMetadataInfo(@Nullable Metadata metadata) {
MetadataInfo metadataInfo = new MetadataInfo();
if (metadata == null) {
return metadataInfo;
}
for (int i = 0; i < metadata.length(); i++) {
Metadata.Entry entry = metadata.get(i);
if (entry instanceof SmtaMetadataEntry) {
SmtaMetadataEntry smtaMetadataEntry = (SmtaMetadataEntry) entry;
metadataInfo.captureFrameRate = smtaMetadataEntry.captureFrameRate;
metadataInfo.inputMaxLayer = smtaMetadataEntry.svcTemporalLayerCount - 1;
} else if (entry instanceof SlowMotionData) {
metadataInfo.slowMotionData = (SlowMotionData) entry;
}
}
if (metadataInfo.slowMotionData == null) {
return metadataInfo;
}
checkState(metadataInfo.inputMaxLayer != C.INDEX_UNSET, "SVC temporal layer count not found.");
checkState(metadataInfo.captureFrameRate != C.RATE_UNSET, "Capture frame rate not found.");
checkState(
metadataInfo.captureFrameRate % 1 == 0
&& metadataInfo.captureFrameRate % TARGET_OUTPUT_FRAME_RATE == 0,
"Invalid capture frame rate: " + metadataInfo.captureFrameRate);
int frameCountDivisor = (int) metadataInfo.captureFrameRate / TARGET_OUTPUT_FRAME_RATE;
int normalSpeedMaxLayer = metadataInfo.inputMaxLayer;
while (normalSpeedMaxLayer >= 0) {
if ((frameCountDivisor & 1) == 1) {
// Set normalSpeedMaxLayer only if captureFrameRate / TARGET_OUTPUT_FRAME_RATE is a power of
// 2. Otherwise, the target output frame rate cannot be reached because removing a layer
// divides the number of frames by 2.
checkState(
frameCountDivisor >> 1 == 0,
"Could not compute normal speed max SVC layer for capture frame rate "
+ metadataInfo.captureFrameRate);
metadataInfo.normalSpeedMaxLayer = normalSpeedMaxLayer;
break;
}
frameCountDivisor >>= 1;
normalSpeedMaxLayer--;
}
return metadataInfo;
}
/** Metadata of an SEF slow motion input. */
private static final class MetadataInfo {
/**
* The frame rate at which the slow motion video has been captured in fps, or {@link
* C#RATE_UNSET} if it is unknown or invalid.
*/
public float captureFrameRate;
/**
* The maximum SVC layer value of the input frames, or {@link C#INDEX_UNSET} if it is unknown.
*/
public int inputMaxLayer;
/**
* The maximum SVC layer value of the frames to keep in order to play the video at normal speed
* at {@link #TARGET_OUTPUT_FRAME_RATE}, or {@link C#INDEX_UNSET} if it is unknown.
*/
public int normalSpeedMaxLayer;
/** The input {@link SlowMotionData}. */
@Nullable public SlowMotionData slowMotionData;
public MetadataInfo() {
captureFrameRate = C.RATE_UNSET;
inputMaxLayer = C.INDEX_UNSET;
normalSpeedMaxLayer = C.INDEX_UNSET;
}
}
/** Information about a slow motion segment. */
private static final class SegmentInfo {
/** The segment start time, in microseconds. */
public final long startTimeUs;
/** The segment end time, in microseconds. */
public final long endTimeUs;
/**
* The segment speedDivisor.
*
* @see SlowMotionData.Segment#speedDivisor
*/
public final int speedDivisor;
/**
* The maximum SVC layer value of the frames to keep in the segment in order to slow down the
* segment by {@code speedDivisor}.
*/
public final int maxLayer;
public SegmentInfo(SlowMotionData.Segment segment, int inputMaxLayer, int normalSpeedLayer) {
this.startTimeUs = C.msToUs(segment.startTimeMs);
this.endTimeUs = C.msToUs(segment.endTimeMs);
this.speedDivisor = segment.speedDivisor;
this.maxLayer = getSlowMotionMaxLayer(speedDivisor, inputMaxLayer, normalSpeedLayer);
}
private static int getSlowMotionMaxLayer(
int speedDivisor, int inputMaxLayer, int normalSpeedMaxLayer) {
int maxLayer = normalSpeedMaxLayer;
// Increase the maximum layer to increase the number of frames in the segment. For every layer
// increment, the number of frames is doubled.
int shiftedSpeedDivisor = speedDivisor;
while (shiftedSpeedDivisor > 0) {
if ((shiftedSpeedDivisor & 1) == 1) {
checkState(shiftedSpeedDivisor >> 1 == 0, "Invalid speed divisor: " + speedDivisor);
break;
}
maxLayer++;
shiftedSpeedDivisor >>= 1;
}
// The optimal segment max layer can be larger than the input max layer. In this case, it is
// not possible to have speedDivisor times more frames in the segment than outside the
// segments. The desired speed must therefore be reached by keeping all the frames and by
// decreasing the frame rate in the segment.
return min(maxLayer, inputMaxLayer);
}
}
}
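
A worked example of the layer arithmetic above, for a typical SEF capture at 240 fps with four SVC temporal layers (inputMaxLayer = 3): getMetadataInfo computes frameCountDivisor = 240 / 30 = 8 = 2^3, so normalSpeedMaxLayer = 3 - 3 = 0 and only base-layer frames (one in eight) are kept outside slow motion segments. For a segment with speedDivisor = 8, getSlowMotionMaxLayer starts from 0 and adds one layer per factor of two, giving min(3, inputMaxLayer) = 3, so every layer is kept inside the segment. Finally, getCurrentFrameOutputTimeUs scales all timestamps by INPUT_FRAME_RATE / captureFrameRate = 30 / 240: the decimated parts then play back in real time at about 30 fps, while the segment, whose timestamps were additionally stretched by speedDivisor via frameTimeDeltaUs, plays eight times slower than real time.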

View File

@@ -0,0 +1,119 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.metadata.mp4.SlowMotionData.Segment.BY_START_THEN_END_THEN_DIVISOR;
import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.mp4.SlowMotionData;
import com.google.android.exoplayer2.metadata.mp4.SlowMotionData.Segment;
import com.google.android.exoplayer2.metadata.mp4.SmtaMetadataEntry;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/** A {@link SpeedProvider} for slow motion segments. */
/* package */ class SegmentSpeedProvider implements SpeedProvider {
/**
* The input frame rate of Samsung slow motion videos is always 30 fps. See
* go/exoplayer-sef-slomo-video-flattening.
*/
private static final int INPUT_FRAME_RATE = 30;
private final ImmutableSortedMap<Long, Float> speedsByStartTimeUs;
private final float baseSpeedMultiplier;
public SegmentSpeedProvider(Format format) {
float captureFrameRate = getCaptureFrameRate(format);
this.baseSpeedMultiplier =
captureFrameRate == C.RATE_UNSET ? 1 : captureFrameRate / INPUT_FRAME_RATE;
this.speedsByStartTimeUs = buildSpeedByStartTimeUsMap(format, baseSpeedMultiplier);
}
@Override
public float getSpeed(long timeUs) {
checkArgument(timeUs >= 0);
@Nullable Map.Entry<Long, Float> entry = speedsByStartTimeUs.floorEntry(timeUs);
return entry != null ? entry.getValue() : baseSpeedMultiplier;
}
private static ImmutableSortedMap<Long, Float> buildSpeedByStartTimeUsMap(
Format format, float baseSpeed) {
List<Segment> segments = extractSlowMotionSegments(format);
if (segments.isEmpty()) {
return ImmutableSortedMap.of();
}
TreeMap<Long, Float> speedsByStartTimeUs = new TreeMap<>();
// Start time maps to the segment speed.
for (int i = 0; i < segments.size(); i++) {
Segment currentSegment = segments.get(i);
speedsByStartTimeUs.put(
C.msToUs(currentSegment.startTimeMs), baseSpeed / currentSegment.speedDivisor);
}
// If the map has an entry at endTime, this is the next segment's start time. If no such entry
// exists, map the endTime to base speed because the times after the end time are not in a
// segment.
for (int i = 0; i < segments.size(); i++) {
Segment currentSegment = segments.get(i);
if (!speedsByStartTimeUs.containsKey(C.msToUs(currentSegment.endTimeMs))) {
speedsByStartTimeUs.put(C.msToUs(currentSegment.endTimeMs), baseSpeed);
}
}
return ImmutableSortedMap.copyOf(speedsByStartTimeUs);
}
private static float getCaptureFrameRate(Format format) {
@Nullable Metadata metadata = format.metadata;
if (metadata == null) {
return C.RATE_UNSET;
}
for (int i = 0; i < metadata.length(); i++) {
Metadata.Entry entry = metadata.get(i);
if (entry instanceof SmtaMetadataEntry) {
return ((SmtaMetadataEntry) entry).captureFrameRate;
}
}
return C.RATE_UNSET;
}
private static ImmutableList<Segment> extractSlowMotionSegments(Format format) {
List<Segment> segments = new ArrayList<>();
@Nullable Metadata metadata = format.metadata;
if (metadata != null) {
for (int i = 0; i < metadata.length(); i++) {
Metadata.Entry entry = metadata.get(i);
if (entry instanceof SlowMotionData) {
segments.addAll(((SlowMotionData) entry).segments);
}
}
}
return ImmutableList.sortedCopyOf(BY_START_THEN_END_THEN_DIVISOR, segments);
}
}
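
A worked example of the map built above, assuming a capture frame rate of 240 fps (so baseSpeedMultiplier = 240 / 30 = 8) and a single slow motion segment from 1000 ms to 2000 ms with speedDivisor = 4: the resulting map is {1_000_000 us -> 8 / 4 = 2, 2_000_000 us -> 8}, so getSpeed returns 8 before the segment (no floor entry, hence the base multiplier), 2 inside it, and 8 after it.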

View File

@@ -0,0 +1,28 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
/** An interface that determines the playback speed for media at specific timestamps. */
public interface SpeedProvider {
/**
* Provides the speed at which the media should be played at the given timestamp.
*
* @param timeUs The timestamp of the media, in microseconds.
* @return The speed at which the media should be played at {@code timeUs}.
*/
float getSpeed(long timeUs);
}
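
An illustrative implementation (hypothetical, not part of this commit) that satisfies the contract with a constant speed, complementing the SegmentSpeedProvider defined earlier in this commit:

/* package */ final class ConstantSpeedProvider implements SpeedProvider {

  private final float speed;

  public ConstantSpeedProvider(float speed) {
    this.speed = speed;
  }

  @Override
  public float getSpeed(long timeUs) {
    return speed;
  }
}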

View File

@@ -0,0 +1,37 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
/** A media transformation configuration. */
/* package */ final class Transformation {
public final boolean removeAudio;
public final boolean removeVideo;
public final boolean flattenForSlowMotion;
public final String outputMimeType;
public Transformation(
boolean removeAudio,
boolean removeVideo,
boolean flattenForSlowMotion,
String outputMimeType) {
this.removeAudio = removeAudio;
this.removeVideo = removeVideo;
this.flattenForSlowMotion = flattenForSlowMotion;
this.outputMimeType = outputMimeType;
}
}

View File

@@ -0,0 +1,653 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS;
import static com.google.android.exoplayer2.DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_MS;
import static com.google.android.exoplayer2.DefaultLoadControl.DEFAULT_MAX_BUFFER_MS;
import static com.google.android.exoplayer2.DefaultLoadControl.DEFAULT_MIN_BUFFER_MS;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static java.lang.Math.min;
import android.content.Context;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.Looper;
import android.os.ParcelFileDescriptor;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.DefaultLoadControl;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.RenderersFactory;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.AnalyticsListener;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;
import com.google.android.exoplayer2.extractor.mp4.Mp4Extractor;
import com.google.android.exoplayer2.metadata.MetadataOutput;
import com.google.android.exoplayer2.source.DefaultMediaSourceFactory;
import com.google.android.exoplayer2.source.MediaSourceFactory;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.text.TextOutput;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.VideoRendererEventListener;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* A transformer to transform media inputs.
*
* <p>The same Transformer instance can be used to transform multiple inputs (sequentially, not
* concurrently).
*
* <p>Transformer instances must be accessed from a single application thread. For the vast majority
* of cases this should be the application's main thread. The thread on which a Transformer instance
* must be accessed can be explicitly specified by passing a {@link Looper} when creating the
* transformer. If no Looper is specified, then the Looper of the thread that the {@link
* Transformer.Builder} is created on is used, or if that thread does not have a Looper, the Looper
* of the application's main thread is used. In all cases the Looper of the thread from which the
* transformer must be accessed can be queried using {@link #getApplicationLooper()}.
*/
@RequiresApi(18)
public final class Transformer {
/** A builder for {@link Transformer} instances. */
public static final class Builder {
private @MonotonicNonNull Context context;
private @MonotonicNonNull MediaSourceFactory mediaSourceFactory;
private boolean removeAudio;
private boolean removeVideo;
private boolean flattenForSlowMotion;
private String outputMimeType;
private Transformer.Listener listener;
private Looper looper;
private Clock clock;
/** Creates a builder with default values. */
public Builder() {
outputMimeType = MimeTypes.VIDEO_MP4;
listener = new Listener() {};
looper = Util.getCurrentOrMainLooper();
clock = Clock.DEFAULT;
}
/** Creates a builder with the values of the provided {@link Transformer}. */
private Builder(Transformer transformer) {
this.context = transformer.context;
this.mediaSourceFactory = transformer.mediaSourceFactory;
this.removeAudio = transformer.transformation.removeAudio;
this.removeVideo = transformer.transformation.removeVideo;
this.flattenForSlowMotion = transformer.transformation.flattenForSlowMotion;
this.outputMimeType = transformer.transformation.outputMimeType;
this.listener = transformer.listener;
this.looper = transformer.looper;
this.clock = transformer.clock;
}
/**
* Sets the {@link Context}.
*
* <p>This parameter is mandatory.
*
* @param context The {@link Context}.
* @return This builder.
*/
public Builder setContext(Context context) {
this.context = context.getApplicationContext();
return this;
}
/**
* Sets the {@link MediaSourceFactory} to be used to retrieve the inputs to transform. The
* default value is a {@link DefaultMediaSourceFactory} built with the context provided in
* {@link #setContext(Context)}.
*
* @param mediaSourceFactory A {@link MediaSourceFactory}.
* @return This builder.
*/
public Builder setMediaSourceFactory(MediaSourceFactory mediaSourceFactory) {
this.mediaSourceFactory = mediaSourceFactory;
return this;
}
/**
* Sets whether to remove the audio from the output. The default value is {@code false}.
*
* <p>The audio and video cannot both be removed because the output would not contain any
* samples.
*
* @param removeAudio Whether to remove the audio.
* @return This builder.
*/
public Builder setRemoveAudio(boolean removeAudio) {
this.removeAudio = removeAudio;
return this;
}
/**
* Sets whether to remove the video from the output. The default value is {@code false}.
*
* <p>The audio and video cannot both be removed because the output would not contain any
* samples.
*
* @param removeVideo Whether to remove the video.
* @return This builder.
*/
public Builder setRemoveVideo(boolean removeVideo) {
this.removeVideo = removeVideo;
return this;
}
/**
* Sets whether the input should be flattened for media containing slow motion markers. The
* transformed output is obtained by removing the slow motion metadata and by actually slowing
* down the parts of the video and audio streams defined in this metadata. The default value for
* {@code flattenForSlowMotion} is {@code false}.
*
* <p>Only Samsung Extension Format (SEF) slow motion metadata type is supported. The
* transformation has no effect if the input does not contain this metadata type.
*
* <p>For SEF slow motion media, the following assumptions are made on the input:
*
* <ul>
* <li>The input container format is (unfragmented) MP4.
* <li>The input contains an AVC video elementary stream with temporal SVC.
* <li>The recording frame rate of the video is 120 or 240 fps.
* </ul>
*
* <p>If specifying a {@link MediaSourceFactory} using {@link
* #setMediaSourceFactory(MediaSourceFactory)}, make sure that {@link
* Mp4Extractor#FLAG_READ_SEF_DATA} is set on the {@link Mp4Extractor} used. Otherwise, the slow
* motion metadata will be ignored and the input won't be flattened.
*
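 * <p>A minimal sketch of providing a {@link MediaSourceFactory} that reads SEF data (assuming a
 * {@code context} is available; other factory configuration is omitted):
 *
 * <pre>{@code
 * DefaultExtractorsFactory extractorsFactory = new DefaultExtractorsFactory();
 * extractorsFactory.setMp4ExtractorFlags(Mp4Extractor.FLAG_READ_SEF_DATA);
 * Transformer transformer =
 *     new Transformer.Builder()
 *         .setContext(context)
 *         .setMediaSourceFactory(new DefaultMediaSourceFactory(context, extractorsFactory))
 *         .setFlattenForSlowMotion(true)
 *         .build();
 * }</pre>
 *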
* @param flattenForSlowMotion Whether to flatten for slow motion.
* @return This builder.
*/
public Builder setFlattenForSlowMotion(boolean flattenForSlowMotion) {
this.flattenForSlowMotion = flattenForSlowMotion;
return this;
}
/**
* Sets the MIME type of the output. The default value is {@link MimeTypes#VIDEO_MP4}. Supported
* values are:
*
* <ul>
* <li>{@link MimeTypes#VIDEO_MP4}
* <li>{@link MimeTypes#VIDEO_WEBM} from API level 21
* </ul>
*
* @param outputMimeType The MIME type of the output.
* @return This builder.
* @throws IllegalArgumentException If the MIME type is not supported.
*/
public Builder setOutputMimeType(String outputMimeType) {
if (!MuxerWrapper.supportsOutputMimeType(outputMimeType)) {
throw new IllegalArgumentException("Unsupported output MIME type: " + outputMimeType);
}
this.outputMimeType = outputMimeType;
return this;
}
/**
* Sets the {@link Transformer.Listener} to listen to the transformation events.
*
* <p>This is equivalent to {@link Transformer#setListener(Listener)}.
*
* @param listener A {@link Transformer.Listener}.
* @return This builder.
*/
public Builder setListener(Transformer.Listener listener) {
this.listener = listener;
return this;
}
/**
* Sets the {@link Looper} that must be used for all calls to the transformer and that is used
* to call listeners on. The default value is the Looper of the thread that this builder was
* created on, or if that thread does not have a Looper, the Looper of the application's main
* thread.
*
* @param looper A {@link Looper}.
* @return This builder.
*/
public Builder setLooper(Looper looper) {
this.looper = looper;
return this;
}
/**
* Sets the {@link Clock} that will be used by the transformer. The default value is {@link
* Clock#DEFAULT}.
*
* @param clock The {@link Clock} instance.
* @return This builder.
*/
@VisibleForTesting
/* package */ Builder setClock(Clock clock) {
this.clock = clock;
return this;
}
/**
* Builds a {@link Transformer} instance.
*
* @throws IllegalStateException If the {@link Context} has not been provided.
* @throws IllegalStateException If both audio and video have been removed (otherwise the output
* would not contain any samples).
*/
public Transformer build() {
checkStateNotNull(context);
if (mediaSourceFactory == null) {
DefaultExtractorsFactory defaultExtractorsFactory = new DefaultExtractorsFactory();
if (flattenForSlowMotion) {
defaultExtractorsFactory.setMp4ExtractorFlags(Mp4Extractor.FLAG_READ_SEF_DATA);
}
mediaSourceFactory = new DefaultMediaSourceFactory(context, defaultExtractorsFactory);
}
Transformation transformation =
new Transformation(removeAudio, removeVideo, flattenForSlowMotion, outputMimeType);
return new Transformer(context, mediaSourceFactory, transformation, listener, looper, clock);
}
}
/** A listener for the transformation events. */
public interface Listener {
/**
* Called when the transformation is completed.
*
* @param inputMediaItem The {@link MediaItem} for which the transformation is completed.
*/
default void onTransformationCompleted(MediaItem inputMediaItem) {}
/**
* Called if an error occurs during the transformation.
*
* @param inputMediaItem The {@link MediaItem} for which the error occurs.
* @param exception The exception describing the error.
*/
default void onTransformationError(MediaItem inputMediaItem, Exception exception) {}
}
/**
 * Progress state. One of {@link #PROGRESS_STATE_WAITING_FOR_AVAILABILITY}, {@link
 * #PROGRESS_STATE_AVAILABLE}, {@link #PROGRESS_STATE_UNAVAILABLE} or {@link
 * #PROGRESS_STATE_NO_TRANSFORMATION}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({
PROGRESS_STATE_WAITING_FOR_AVAILABILITY,
PROGRESS_STATE_AVAILABLE,
PROGRESS_STATE_UNAVAILABLE,
PROGRESS_STATE_NO_TRANSFORMATION
})
public @interface ProgressState {}
/**
* Indicates that the progress is unavailable for the current transformation, but might become
* available.
*/
public static final int PROGRESS_STATE_WAITING_FOR_AVAILABILITY = 0;
/** Indicates that the progress is available. */
public static final int PROGRESS_STATE_AVAILABLE = 1;
/** Indicates that the progress is permanently unavailable for the current transformation. */
public static final int PROGRESS_STATE_UNAVAILABLE = 2;
/** Indicates that there is no current transformation. */
public static final int PROGRESS_STATE_NO_TRANSFORMATION = 4;
private final Context context;
private final MediaSourceFactory mediaSourceFactory;
private final Transformation transformation;
private final Looper looper;
private final Clock clock;
private Transformer.Listener listener;
@Nullable private MuxerWrapper muxerWrapper;
@Nullable private SimpleExoPlayer player;
@ProgressState private int progressState;
private Transformer(
Context context,
MediaSourceFactory mediaSourceFactory,
Transformation transformation,
Transformer.Listener listener,
Looper looper,
Clock clock) {
checkState(
!transformation.removeAudio || !transformation.removeVideo,
"Audio and video cannot both be removed.");
this.context = context;
this.mediaSourceFactory = mediaSourceFactory;
this.transformation = transformation;
this.listener = listener;
this.looper = looper;
this.clock = clock;
progressState = PROGRESS_STATE_NO_TRANSFORMATION;
}
/** Returns a {@link Transformer.Builder} initialized with the values of this instance. */
public Builder buildUpon() {
return new Builder(this);
}
/**
* Sets the {@link Transformer.Listener} to listen to the transformation events.
*
* @param listener A {@link Transformer.Listener}.
* @throws IllegalStateException If this method is called from the wrong thread.
*/
public void setListener(Transformer.Listener listener) {
verifyApplicationThread();
this.listener = listener;
}
/**
* Starts an asynchronous operation to transform the given {@link MediaItem}.
*
 * <p>The transformation state is reported through the {@link Builder#setListener(Listener)
 * listener}.
*
* <p>Concurrent transformations on the same Transformer object are not allowed.
*
* <p>The output can contain at most one video track and one audio track. Other track types are
* ignored. For adaptive bitrate {@link com.google.android.exoplayer2.source.MediaSource media
* sources}, the highest bitrate video and audio streams are selected.
*
* @param mediaItem The {@link MediaItem} to transform. The supported sample formats depend on the
* output container format and are described in {@link MediaMuxer#addTrack(MediaFormat)}.
* @param path The path to the output file.
* @throws IllegalArgumentException If the path is invalid.
* @throws IllegalStateException If this method is called from the wrong thread.
* @throws IllegalStateException If a transformation is already in progress.
* @throws IOException If an error occurs opening the output file for writing.
*/
public void startTransformation(MediaItem mediaItem, String path) throws IOException {
startTransformation(mediaItem, new MuxerWrapper(path, transformation.outputMimeType));
}
/**
* Starts an asynchronous operation to transform the given {@link MediaItem}.
*
 * <p>The transformation state is reported through the {@link Builder#setListener(Listener)
 * listener}.
*
* <p>Concurrent transformations on the same Transformer object are not allowed.
*
* <p>The output can contain at most one video track and one audio track. Other track types are
* ignored. For adaptive bitrate {@link com.google.android.exoplayer2.source.MediaSource media
* sources}, the highest bitrate video and audio streams are selected.
*
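 * <p>A minimal sketch of providing the output as a {@code content://} {@link android.net.Uri}
 * ({@code outputUri} is a placeholder, and exception and null handling are omitted):
 *
 * <pre>{@code
 * ParcelFileDescriptor parcelFileDescriptor =
 *     context.getContentResolver().openFileDescriptor(outputUri, "rw");
 * transformer.startTransformation(mediaItem, parcelFileDescriptor);
 * parcelFileDescriptor.close();
 * }</pre>
 *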
* @param mediaItem The {@link MediaItem} to transform. The supported sample formats depend on the
* output container format and are described in {@link MediaMuxer#addTrack(MediaFormat)}.
* @param parcelFileDescriptor A readable and writable {@link ParcelFileDescriptor} of the output.
* The file referenced by this ParcelFileDescriptor should not be used before the
* transformation is completed. It is the responsibility of the caller to close the
* ParcelFileDescriptor. This can be done after this method returns.
* @throws IllegalArgumentException If the file descriptor is invalid.
* @throws IllegalStateException If this method is called from the wrong thread.
* @throws IllegalStateException If a transformation is already in progress.
* @throws IOException If an error occurs opening the output file for writing.
*/
@RequiresApi(26)
public void startTransformation(MediaItem mediaItem, ParcelFileDescriptor parcelFileDescriptor)
throws IOException {
startTransformation(
mediaItem, new MuxerWrapper(parcelFileDescriptor, transformation.outputMimeType));
}
private void startTransformation(MediaItem mediaItem, MuxerWrapper muxerWrapper) {
verifyApplicationThread();
if (player != null) {
throw new IllegalStateException("There is already a transformation in progress.");
}
this.muxerWrapper = muxerWrapper;
DefaultTrackSelector trackSelector = new DefaultTrackSelector(context);
trackSelector.setParameters(
new DefaultTrackSelector.ParametersBuilder(context)
.setForceHighestSupportedBitrate(true)
.build());
// Arbitrarily decrease buffers for playback so that samples start being sent earlier to the
// muxer (rebuffers are less problematic for the transformation use case).
DefaultLoadControl loadControl =
new DefaultLoadControl.Builder()
.setBufferDurationsMs(
DEFAULT_MIN_BUFFER_MS,
DEFAULT_MAX_BUFFER_MS,
DEFAULT_BUFFER_FOR_PLAYBACK_MS / 10,
DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS / 10)
.build();
player =
new SimpleExoPlayer.Builder(
context, new TransformerRenderersFactory(muxerWrapper, transformation))
.setMediaSourceFactory(mediaSourceFactory)
.setTrackSelector(trackSelector)
.setLoadControl(loadControl)
.setLooper(looper)
.setClock(clock)
.build();
player.setMediaItem(mediaItem);
player.addAnalyticsListener(new TransformerAnalyticsListener(mediaItem, muxerWrapper));
player.prepare();
progressState = PROGRESS_STATE_WAITING_FOR_AVAILABILITY;
}
/**
* Returns the {@link Looper} associated with the application thread that's used to access the
* transformer and on which transformer events are received.
*/
public Looper getApplicationLooper() {
return looper;
}
/**
* Returns the current {@link ProgressState} and updates {@code progressHolder} with the current
* progress if it is {@link #PROGRESS_STATE_AVAILABLE available}.
*
* <p>After a transformation {@link Listener#onTransformationCompleted(MediaItem) completes}, this
* method returns {@link #PROGRESS_STATE_NO_TRANSFORMATION}.
*
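 * <p>A minimal sketch of polling the progress from the application thread (the 500 ms interval
 * is arbitrary):
 *
 * <pre>{@code
 * ProgressHolder progressHolder = new ProgressHolder();
 * Handler handler = new Handler(transformer.getApplicationLooper());
 * handler.post(
 *     new Runnable() {
 *       public void run() {
 *         if (transformer.getProgress(progressHolder)
 *             != Transformer.PROGRESS_STATE_NO_TRANSFORMATION) {
 *           // Use progressHolder.progress if the state is PROGRESS_STATE_AVAILABLE.
 *           handler.postDelayed(this, 500);
 *         }
 *       }
 *     });
 * }</pre>
 *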
* @param progressHolder A {@link ProgressHolder}, updated to hold the percentage progress if
* {@link #PROGRESS_STATE_AVAILABLE available}.
* @return The {@link ProgressState}.
* @throws IllegalStateException If this method is called from the wrong thread.
*/
@ProgressState
public int getProgress(ProgressHolder progressHolder) {
verifyApplicationThread();
if (progressState == PROGRESS_STATE_AVAILABLE) {
Player player = checkNotNull(this.player);
long durationMs = player.getDuration();
long positionMs = player.getCurrentPosition();
progressHolder.progress = min((int) (positionMs * 100 / durationMs), 99);
}
return progressState;
}
/**
* Cancels the transformation that is currently in progress, if any.
*
* @throws IllegalStateException If this method is called from the wrong thread.
*/
public void cancel() {
// It doesn't matter if stopping the muxer throws an exception, because the transformation is
// being cancelled anyway.
releaseResources(/* swallowStopMuxerException= */ true);
}
/**
* Releases the resources.
*
* @param swallowStopMuxerException Whether to swallow exceptions thrown by stopping the muxer.
* @throws IllegalStateException If this method is called from the wrong thread.
* @throws IllegalStateException If the muxer is in the wrong state when stopping it and {@code
* swallowStopMuxerException} is false.
*/
private void releaseResources(boolean swallowStopMuxerException) {
verifyApplicationThread();
if (player != null) {
player.release();
player = null;
}
if (muxerWrapper != null) {
try {
muxerWrapper.stop();
} catch (IllegalStateException e) {
if (!swallowStopMuxerException) {
throw e;
}
} finally {
muxerWrapper.release();
muxerWrapper = null;
}
}
progressState = PROGRESS_STATE_NO_TRANSFORMATION;
}
private void verifyApplicationThread() {
if (Looper.myLooper() != looper) {
throw new IllegalStateException("Transformer is accessed on the wrong thread.");
}
}
private static final class TransformerRenderersFactory implements RenderersFactory {
private final MuxerWrapper muxerWrapper;
private final TransformerMediaClock mediaClock;
private final Transformation transformation;
public TransformerRenderersFactory(MuxerWrapper muxerWrapper, Transformation transformation) {
this.muxerWrapper = muxerWrapper;
this.transformation = transformation;
mediaClock = new TransformerMediaClock();
}
@Override
public Renderer[] createRenderers(
Handler eventHandler,
VideoRendererEventListener videoRendererEventListener,
AudioRendererEventListener audioRendererEventListener,
TextOutput textRendererOutput,
MetadataOutput metadataRendererOutput) {
int rendererCount = transformation.removeAudio || transformation.removeVideo ? 1 : 2;
Renderer[] renderers = new Renderer[rendererCount];
int index = 0;
if (!transformation.removeAudio) {
renderers[index] = new TransformerAudioRenderer(muxerWrapper, mediaClock, transformation);
index++;
}
if (!transformation.removeVideo) {
renderers[index] = new TransformerVideoRenderer(muxerWrapper, mediaClock, transformation);
index++;
}
return renderers;
}
}
private final class TransformerAnalyticsListener implements AnalyticsListener {
private final MediaItem mediaItem;
private final MuxerWrapper muxerWrapper;
public TransformerAnalyticsListener(MediaItem mediaItem, MuxerWrapper muxerWrapper) {
this.mediaItem = mediaItem;
this.muxerWrapper = muxerWrapper;
}
@Override
public void onPlaybackStateChanged(EventTime eventTime, int state) {
if (state == Player.STATE_ENDED) {
handleTransformationEnded(/* exception= */ null);
}
}
@Override
public void onTimelineChanged(EventTime eventTime, int reason) {
if (progressState != PROGRESS_STATE_WAITING_FOR_AVAILABILITY) {
return;
}
Timeline.Window window = new Timeline.Window();
eventTime.timeline.getWindow(/* windowIndex= */ 0, window);
if (!window.isPlaceholder) {
long durationUs = window.durationUs;
// Make progress permanently unavailable if the duration is unknown, so that it doesn't jump
// to a high value at the end of the transformation, in case the duration is only set once the
// media is entirely loaded.
progressState =
durationUs <= 0 || durationUs == C.TIME_UNSET
? PROGRESS_STATE_UNAVAILABLE
: PROGRESS_STATE_AVAILABLE;
checkNotNull(player).play();
}
}
@Override
public void onTracksChanged(
EventTime eventTime, TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {
if (muxerWrapper.getTrackCount() == 0) {
handleTransformationEnded(
new IllegalStateException(
"The output does not contain any tracks. Check that at least one of the input"
+ " sample formats is supported."));
}
}
@Override
public void onPlayerError(EventTime eventTime, ExoPlaybackException error) {
handleTransformationEnded(error);
}
private void handleTransformationEnded(@Nullable Exception exception) {
try {
releaseResources(/* swallowStopMuxerException= */ false);
} catch (IllegalStateException e) {
if (exception == null) {
exception = e;
}
}
if (exception == null) {
listener.onTransformationCompleted(mediaItem);
} else {
listener.onTransformationError(mediaItem, exception);
}
}
}
}

View File

@ -0,0 +1,406 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static java.lang.Math.min;
import android.media.MediaCodec.BufferInfo;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.AudioProcessor.AudioFormat;
import com.google.android.exoplayer2.audio.SonicAudioProcessor;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.nio.ByteBuffer;
@RequiresApi(18)
/* package */ final class TransformerAudioRenderer extends TransformerBaseRenderer {
private static final String TAG = "TransformerAudioRenderer";
// MediaCodec decoders always output 16 bit PCM, unless configured to output PCM float.
// https://developer.android.com/reference/android/media/MediaCodec#raw-audio-buffers.
private static final int MEDIA_CODEC_PCM_ENCODING = C.ENCODING_PCM_16BIT;
private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;
private static final float SPEED_UNSET = -1f;
private final DecoderInputBuffer decoderInputBuffer;
private final DecoderInputBuffer encoderInputBuffer;
private final SonicAudioProcessor sonicAudioProcessor;
@Nullable private MediaCodecAdapterWrapper decoder;
@Nullable private MediaCodecAdapterWrapper encoder;
@Nullable private SpeedProvider speedProvider;
private ByteBuffer sonicOutputBuffer;
private long nextEncoderInputBufferTimeUs;
private float currentSpeed;
private boolean muxerWrapperTrackEnded;
private boolean hasEncoderOutputFormat;
private boolean drainingSonicForSpeedChange;
public TransformerAudioRenderer(
MuxerWrapper muxerWrapper, TransformerMediaClock mediaClock, Transformation transformation) {
super(C.TRACK_TYPE_AUDIO, muxerWrapper, mediaClock, transformation);
decoderInputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
encoderInputBuffer =
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
sonicAudioProcessor = new SonicAudioProcessor();
sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
nextEncoderInputBufferTimeUs = 0;
currentSpeed = SPEED_UNSET;
}
@Override
public String getName() {
return TAG;
}
@Override
public boolean isEnded() {
return muxerWrapperTrackEnded;
}
@Override
protected void onReset() {
decoderInputBuffer.clear();
decoderInputBuffer.data = null;
encoderInputBuffer.clear();
encoderInputBuffer.data = null;
sonicAudioProcessor.reset();
if (decoder != null) {
decoder.release();
decoder = null;
}
if (encoder != null) {
encoder.release();
encoder = null;
}
speedProvider = null;
sonicOutputBuffer = AudioProcessor.EMPTY_BUFFER;
nextEncoderInputBufferTimeUs = 0;
currentSpeed = SPEED_UNSET;
muxerWrapperTrackEnded = false;
hasEncoderOutputFormat = false;
drainingSonicForSpeedChange = false;
}
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (!isRendererStarted || isEnded()) {
return;
}
if (!setupDecoder() || !setupEncoderAndMaybeSonic()) {
return;
}
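// Drain the pipeline from the muxer end backwards (muxer <- encoder <- [Sonic <-] decoder
// <- source) so that downstream components free space before more data is queued upstream.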
while (drainEncoderToFeedMuxer()) {}
if (sonicAudioProcessor.isActive()) {
while (drainSonicToFeedEncoder()) {}
while (drainDecoderToFeedSonic()) {}
} else {
while (drainDecoderToFeedEncoder()) {}
}
while (feedDecoderInputFromSource()) {}
}
/** Returns whether it may be possible to process more data with this method. */
private boolean drainEncoderToFeedMuxer() {
MediaCodecAdapterWrapper encoder = checkNotNull(this.encoder);
if (!hasEncoderOutputFormat) {
// Dequeue output format change.
encoder.maybeDequeueOutputBuffer();
@Nullable Format encoderOutputFormat = encoder.getOutputFormat();
if (encoderOutputFormat == null) {
return false;
}
hasEncoderOutputFormat = true;
muxerWrapper.addTrackFormat(encoderOutputFormat);
}
if (encoder.isEnded()) {
// The encoder output stream has ended and there is no pending output, so end the muxer track.
muxerWrapper.endTrack(getTrackType());
muxerWrapperTrackEnded = true;
return false;
}
if (!encoder.maybeDequeueOutputBuffer()) {
return false;
}
ByteBuffer encoderOutputBuffer = checkNotNull(encoder.getOutputBuffer());
BufferInfo encoderOutputBufferInfo = checkNotNull(encoder.getOutputBufferInfo());
if (!muxerWrapper.writeSample(
getTrackType(),
encoderOutputBuffer,
/* isKeyFrame= */ true,
encoderOutputBufferInfo.presentationTimeUs)) {
return false;
}
encoder.releaseOutputBuffer();
return true;
}
/** Returns whether it may be possible to process more data with this method. */
private boolean drainDecoderToFeedEncoder() {
MediaCodecAdapterWrapper decoder = checkNotNull(this.decoder);
MediaCodecAdapterWrapper encoder = checkNotNull(this.encoder);
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
return false;
}
if (decoder.isEnded()) {
queueEndOfStreamToEncoder();
return false;
}
if (!decoder.maybeDequeueOutputBuffer()) {
return false;
}
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
flushSonicAndSetSpeed(currentSpeed);
return false;
}
ByteBuffer decoderOutputBuffer = checkNotNull(decoder.getOutputBuffer());
feedEncoder(decoderOutputBuffer);
if (!decoderOutputBuffer.hasRemaining()) {
decoder.releaseOutputBuffer();
}
return true;
}
/** Returns whether it may be possible to process more data with this method. */
private boolean drainSonicToFeedEncoder() {
MediaCodecAdapterWrapper encoder = checkNotNull(this.encoder);
if (!encoder.maybeDequeueInputBuffer(encoderInputBuffer)) {
return false;
}
if (!sonicOutputBuffer.hasRemaining()) {
sonicOutputBuffer = sonicAudioProcessor.getOutput();
if (!sonicOutputBuffer.hasRemaining()) {
if (checkNotNull(decoder).isEnded() && sonicAudioProcessor.isEnded()) {
queueEndOfStreamToEncoder();
}
return false;
}
}
return feedEncoder(sonicOutputBuffer);
}
/** Returns whether it may be possible to process more data with this method. */
private boolean drainDecoderToFeedSonic() {
MediaCodecAdapterWrapper decoder = checkNotNull(this.decoder);
if (drainingSonicForSpeedChange) {
if (!sonicAudioProcessor.isEnded()) {
// Sonic needs draining, but has not fully drained yet.
return false;
}
flushSonicAndSetSpeed(currentSpeed);
drainingSonicForSpeedChange = false;
}
// Sonic invalidates the output buffer when more input is queued, so we don't queue if there is
// output still to be processed.
if (sonicOutputBuffer.hasRemaining()) {
return false;
}
if (decoder.isEnded()) {
sonicAudioProcessor.queueEndOfStream();
return false;
}
checkState(!sonicAudioProcessor.isEnded());
if (!decoder.maybeDequeueOutputBuffer()) {
return false;
}
if (isSpeedChanging(checkNotNull(decoder.getOutputBufferInfo()))) {
sonicAudioProcessor.queueEndOfStream();
drainingSonicForSpeedChange = true;
return false;
}
ByteBuffer decoderOutputBuffer = checkNotNull(decoder.getOutputBuffer());
sonicAudioProcessor.queueInput(decoderOutputBuffer);
if (!decoderOutputBuffer.hasRemaining()) {
decoder.releaseOutputBuffer();
}
return true;
}
/** Returns whether it may be possible to process more data with this method. */
private boolean feedDecoderInputFromSource() {
MediaCodecAdapterWrapper decoder = checkNotNull(this.decoder);
if (!decoder.maybeDequeueInputBuffer(decoderInputBuffer)) {
return false;
}
decoderInputBuffer.clear();
@SampleStream.ReadDataResult
int result = readSource(getFormatHolder(), decoderInputBuffer, /* formatRequired= */ false);
switch (result) {
case C.RESULT_BUFFER_READ:
mediaClock.updateTimeForTrackType(getTrackType(), decoderInputBuffer.timeUs);
decoderInputBuffer.flip();
return decoder.queueInputBuffer(decoderInputBuffer);
case C.RESULT_FORMAT_READ:
throw new IllegalStateException("Format changes are not supported.");
case C.RESULT_NOTHING_READ:
default:
return false;
}
}
/**
* Feeds the encoder the {@link ByteBuffer inputBuffer} with the correct {@code timeUs}.
*
* @param inputBuffer The buffer to be fed.
* @return Whether more input buffers can be queued to the encoder.
*/
private boolean feedEncoder(ByteBuffer inputBuffer) {
MediaCodecAdapterWrapper encoder = checkNotNull(this.encoder);
ByteBuffer encoderInputBufferData = checkNotNull(encoderInputBuffer.data);
int bufferLimit = inputBuffer.limit();
inputBuffer.limit(min(bufferLimit, inputBuffer.position() + encoderInputBufferData.capacity()));
encoderInputBufferData.put(inputBuffer);
encoderInputBuffer.timeUs = nextEncoderInputBufferTimeUs;
nextEncoderInputBufferTimeUs +=
getBufferDurationUs(
/* bytesWritten= */ encoderInputBufferData.position(),
/* bytesPerFrame= */ Util.getPcmFrameSize(
MEDIA_CODEC_PCM_ENCODING, encoder.getConfigFormat().channelCount),
encoder.getConfigFormat().sampleRate);
encoderInputBuffer.setFlags(0);
encoderInputBuffer.flip();
inputBuffer.limit(bufferLimit);
return encoder.queueInputBuffer(encoderInputBuffer);
}
private void queueEndOfStreamToEncoder() {
MediaCodecAdapterWrapper encoder = checkNotNull(this.encoder);
checkState(checkNotNull(encoderInputBuffer.data).position() == 0);
encoderInputBuffer.addFlag(C.BUFFER_FLAG_END_OF_STREAM);
encoderInputBuffer.flip();
// Queuing EOS should only occur with an empty buffer.
encoder.queueInputBuffer(encoderInputBuffer);
}
/** Returns whether the encoder has been set up. */
private boolean setupEncoderAndMaybeSonic() throws ExoPlaybackException {
MediaCodecAdapterWrapper decoder = checkNotNull(this.decoder);
if (encoder != null) {
return true;
}
Format decoderFormat = decoder.getConfigFormat();
if (transformation.flattenForSlowMotion) {
try {
configureSonic(decoderFormat);
} catch (AudioProcessor.UnhandledAudioFormatException e) {
throw ExoPlaybackException.createForRenderer(
e, TAG, getIndex(), /* rendererFormat= */ null, C.FORMAT_HANDLED);
}
}
Format encoderFormat =
decoderFormat.buildUpon().setAverageBitrate(DEFAULT_ENCODER_BITRATE).build();
checkNotNull(encoderFormat.sampleMimeType);
try {
encoder = MediaCodecAdapterWrapper.createForAudioEncoding(encoderFormat);
} catch (IOException e) {
throw ExoPlaybackException.createForRenderer(
e, TAG, getIndex(), encoderFormat, /* rendererFormatSupport= */ C.FORMAT_HANDLED);
}
return true;
}
/** Returns whether the decoder has been set up. */
private boolean setupDecoder() throws ExoPlaybackException {
if (decoder != null) {
return true;
}
FormatHolder formatHolder = getFormatHolder();
@SampleStream.ReadDataResult
int result = readSource(formatHolder, decoderInputBuffer, /* formatRequired= */ true);
if (result != C.RESULT_FORMAT_READ) {
return false;
}
Format decoderFormat = checkNotNull(formatHolder.format);
checkNotNull(decoderFormat.sampleMimeType);
try {
decoder = MediaCodecAdapterWrapper.createForAudioDecoding(decoderFormat);
} catch (IOException e) {
throw ExoPlaybackException.createForRenderer(
e, TAG, getIndex(), decoderFormat, /* rendererFormatSupport= */ C.FORMAT_HANDLED);
}
speedProvider = new SegmentSpeedProvider(decoderFormat);
currentSpeed = speedProvider.getSpeed(0);
return true;
}
private boolean isSpeedChanging(BufferInfo bufferInfo) {
if (!transformation.flattenForSlowMotion) {
return false;
}
float newSpeed = checkNotNull(speedProvider).getSpeed(bufferInfo.presentationTimeUs);
boolean speedChanging = newSpeed != currentSpeed;
currentSpeed = newSpeed;
return speedChanging;
}
private void configureSonic(Format format) throws AudioProcessor.UnhandledAudioFormatException {
sonicAudioProcessor.configure(
new AudioFormat(format.sampleRate, format.channelCount, MEDIA_CODEC_PCM_ENCODING));
flushSonicAndSetSpeed(currentSpeed);
}
private void flushSonicAndSetSpeed(float speed) {
sonicAudioProcessor.setSpeed(speed);
sonicAudioProcessor.setPitch(speed);
sonicAudioProcessor.flush();
}
private static long getBufferDurationUs(long bytesWritten, int bytesPerFrame, int sampleRate) {
long framesWritten = bytesWritten / bytesPerFrame;
return framesWritten * C.MICROS_PER_SECOND / sampleRate;
}
}

View File

@ -0,0 +1,87 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.BaseRenderer;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.util.MediaClock;
import com.google.android.exoplayer2.util.MimeTypes;
@RequiresApi(18)
/* package */ abstract class TransformerBaseRenderer extends BaseRenderer {
protected final MuxerWrapper muxerWrapper;
protected final TransformerMediaClock mediaClock;
protected final Transformation transformation;
protected boolean isRendererStarted;
public TransformerBaseRenderer(
int trackType,
MuxerWrapper muxerWrapper,
TransformerMediaClock mediaClock,
Transformation transformation) {
super(trackType);
this.muxerWrapper = muxerWrapper;
this.mediaClock = mediaClock;
this.transformation = transformation;
}
@Override
@C.FormatSupport
public final int supportsFormat(Format format) {
@Nullable String sampleMimeType = format.sampleMimeType;
if (MimeTypes.getTrackType(format.sampleMimeType) != getTrackType()) {
return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE);
} else if (muxerWrapper.supportsSampleMimeType(sampleMimeType)) {
return RendererCapabilities.create(C.FORMAT_HANDLED);
} else {
return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE);
}
}
@Override
public final boolean isReady() {
return isSourceReady();
}
@Override
public final MediaClock getMediaClock() {
return mediaClock;
}
@Override
protected final void onEnabled(boolean joining, boolean mayRenderStartOfStream) {
muxerWrapper.registerTrack();
mediaClock.updateTimeForTrackType(getTrackType(), 0L);
}
@Override
protected final void onStarted() {
isRendererStarted = true;
}
@Override
protected final void onStopped() {
isRendererStarted = false;
}
}

View File

@ -0,0 +1,68 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Util.minValue;
import android.util.SparseLongArray;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.util.MediaClock;
@RequiresApi(18)
/* package */ final class TransformerMediaClock implements MediaClock {
private final SparseLongArray trackTypeToTimeUs;
private long minTrackTimeUs;
public TransformerMediaClock() {
trackTypeToTimeUs = new SparseLongArray();
}
/**
* Updates the time for a given track type. The clock time is computed based on the different
* track times.
*/
public void updateTimeForTrackType(int trackType, long timeUs) {
long previousTimeUs = trackTypeToTimeUs.get(trackType, /* valueIfKeyNotFound= */ C.TIME_UNSET);
if (previousTimeUs != C.TIME_UNSET && timeUs <= previousTimeUs) {
// Make sure that the track times are increasing and therefore that the clock time is
// increasing. This is necessary for progress updates.
return;
}
trackTypeToTimeUs.put(trackType, timeUs);
if (previousTimeUs == C.TIME_UNSET || previousTimeUs == minTrackTimeUs) {
minTrackTimeUs = minValue(trackTypeToTimeUs);
}
}
@Override
public long getPositionUs() {
// Use the minimum position among tracks as the clock position to ensure that the buffered
// duration is positive. This is also useful for controlling sample interleaving.
return minTrackTimeUs;
}
@Override
public void setPlaybackParameters(PlaybackParameters playbackParameters) {}
@Override
public PlaybackParameters getPlaybackParameters() {
// Playback parameters are unknown. Set default value.
return PlaybackParameters.DEFAULT;
}
}

View File

@ -0,0 +1,128 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.source.SampleStream;
import java.nio.ByteBuffer;
@RequiresApi(18)
/* package */ final class TransformerVideoRenderer extends TransformerBaseRenderer {
private static final String TAG = "TransformerVideoRenderer";
private final DecoderInputBuffer buffer;
@Nullable private SampleTransformer sampleTransformer;
private boolean formatRead;
private boolean isBufferPending;
private boolean isInputStreamEnded;
public TransformerVideoRenderer(
MuxerWrapper muxerWrapper, TransformerMediaClock mediaClock, Transformation transformation) {
super(C.TRACK_TYPE_VIDEO, muxerWrapper, mediaClock, transformation);
buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT);
}
@Override
public String getName() {
return TAG;
}
@Override
public void render(long positionUs, long elapsedRealtimeUs) {
if (!isRendererStarted || isEnded()) {
return;
}
if (!formatRead) {
FormatHolder formatHolder = getFormatHolder();
@SampleStream.ReadDataResult
int result = readSource(formatHolder, buffer, /* formatRequired= */ true);
if (result != C.RESULT_FORMAT_READ) {
return;
}
Format format = checkNotNull(formatHolder.format);
formatRead = true;
if (transformation.flattenForSlowMotion) {
sampleTransformer = new SefSlowMotionVideoSampleTransformer(format);
}
muxerWrapper.addTrackFormat(format);
}
while (true) {
// Read sample.
if (!isBufferPending && !readAndTransformBuffer()) {
return;
}
// Write sample.
isBufferPending =
!muxerWrapper.writeSample(
getTrackType(), buffer.data, buffer.isKeyFrame(), buffer.timeUs);
if (isBufferPending) {
return;
}
}
}
@Override
public boolean isEnded() {
return isInputStreamEnded;
}
/**
* Checks whether a sample can be read and, if so, reads it, transforms it and writes the
* resulting sample to the {@link #buffer}.
*
* <p>The buffer data can be set to null if the transformation applied discards the sample.
*
* @return Whether a sample has been read and transformed.
*/
private boolean readAndTransformBuffer() {
buffer.clear();
@SampleStream.ReadDataResult
int result = readSource(getFormatHolder(), buffer, /* formatRequired= */ false);
if (result == C.RESULT_FORMAT_READ) {
throw new IllegalStateException("Format changes are not supported.");
} else if (result == C.RESULT_NOTHING_READ) {
return false;
}
// Buffer read.
if (buffer.isEndOfStream()) {
isInputStreamEnded = true;
muxerWrapper.endTrack(getTrackType());
return false;
}
mediaClock.updateTimeForTrackType(getTrackType(), buffer.timeUs);
ByteBuffer data = checkNotNull(buffer.data);
data.flip();
if (sampleTransformer != null) {
sampleTransformer.transformSample(buffer);
}
return true;
}
}

View File

@ -0,0 +1,19 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package com.google.android.exoplayer2.transformer;
import com.google.android.exoplayer2.util.NonNullApi;

View File

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright 2020 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<manifest package="com.google.android.exoplayer2.transformer.test">
<uses-sdk/>
</manifest>

View File

@ -0,0 +1,301 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.transformer.SefSlowMotionVideoSampleTransformer.INPUT_FRAME_RATE;
import static com.google.common.truth.Truth.assertThat;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.mp4.SlowMotionData;
import com.google.android.exoplayer2.metadata.mp4.SmtaMetadataEntry;
import com.google.android.exoplayer2.util.MimeTypes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit tests for {@link SefSlowMotionVideoSampleTransformer}. */
@RunWith(AndroidJUnit4.class)
public class SefSlowMotionVideoSampleTransformerTest {
/**
* Sequence of temporal SVC layers in an SEF slow motion video track with a maximum layer of 3.
*
 * <p>Each value is attached to a frame and the sequence is repeated until there are no frames
 * left.
*/
private static final int[] LAYER_SEQUENCE_MAX_LAYER_THREE = new int[] {0, 3, 2, 3, 1, 3, 2, 3};
@Test
public void processCurrentFrame_240fps_keepsExpectedFrames() {
int captureFrameRate = 240;
int inputMaxLayer = 3;
int frameCount = 46;
SlowMotionData.Segment segment1 =
createSegment(/* startFrameIndex= */ 11, /* endFrameIndex= */ 17, /* speedDivisor= */ 2);
SlowMotionData.Segment segment2 =
createSegment(/* startFrameIndex= */ 31, /* endFrameIndex= */ 38, /* speedDivisor= */ 8);
Format format =
createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer =
new SefSlowMotionVideoSampleTransformer(format);
List<Integer> outputLayers =
getKeptOutputLayers(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
List<Integer> expectedLayers = Arrays.asList(0, 0, 1, 0, 0, 1, 2, 3, 0, 3, 2, 3, 1, 3, 0);
assertThat(outputLayers).isEqualTo(expectedLayers);
}
@Test
public void processCurrentFrame_120fps_keepsExpectedFrames() {
int captureFrameRate = 120;
int inputMaxLayer = 3;
int frameCount = 46;
SlowMotionData.Segment segment1 =
createSegment(/* startFrameIndex= */ 9, /* endFrameIndex= */ 17, /* speedDivisor= */ 4);
SlowMotionData.Segment segment2 =
createSegment(/* startFrameIndex= */ 31, /* endFrameIndex= */ 38, /* speedDivisor= */ 8);
Format format =
createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer =
new SefSlowMotionVideoSampleTransformer(format);
List<Integer> outputLayers =
getKeptOutputLayers(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
List<Integer> expectedLayers =
Arrays.asList(0, 1, 0, 3, 2, 3, 1, 3, 2, 3, 0, 1, 0, 1, 2, 3, 0, 3, 2, 3, 1, 3, 0, 1);
assertThat(outputLayers).isEqualTo(expectedLayers);
}
@Test
public void processCurrentFrame_contiguousSegments_keepsExpectedFrames() {
int captureFrameRate = 240;
int inputMaxLayer = 3;
int frameCount = 26;
SlowMotionData.Segment segment1 =
createSegment(/* startFrameIndex= */ 11, /* endFrameIndex= */ 19, /* speedDivisor= */ 2);
SlowMotionData.Segment segment2 =
createSegment(/* startFrameIndex= */ 19, /* endFrameIndex= */ 22, /* speedDivisor= */ 8);
Format format =
createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer =
new SefSlowMotionVideoSampleTransformer(format);
List<Integer> outputLayers =
getKeptOutputLayers(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
List<Integer> expectedLayers = Arrays.asList(0, 0, 1, 0, 2, 3, 1, 3, 0);
assertThat(outputLayers).isEqualTo(expectedLayers);
}
@Test
public void processCurrentFrame_skipsSegmentsWithNoFrame() {
int captureFrameRate = 240;
int inputMaxLayer = 3;
int frameCount = 16;
SlowMotionData.Segment segmentWithNoFrame1 =
new SlowMotionData.Segment(
/* startTimeMs= */ 120, /* endTimeMs= */ 130, /* speedDivisor= */ 2);
SlowMotionData.Segment segmentWithNoFrame2 =
new SlowMotionData.Segment(
/* startTimeMs= */ 270, /* endTimeMs= */ 280, /* speedDivisor= */ 2);
SlowMotionData.Segment segmentWithFrame =
createSegment(/* startFrameIndex= */ 11, /* endFrameIndex= */ 16, /* speedDivisor= */ 2);
Format format =
createSefSlowMotionFormat(
captureFrameRate,
inputMaxLayer,
Arrays.asList(segmentWithNoFrame1, segmentWithNoFrame2, segmentWithFrame));
SefSlowMotionVideoSampleTransformer sampleTransformer =
new SefSlowMotionVideoSampleTransformer(format);
List<Integer> outputLayers =
getKeptOutputLayers(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
List<Integer> expectedLayers = Arrays.asList(0, 0, 1);
assertThat(outputLayers).isEqualTo(expectedLayers);
}
@Test
public void getCurrentFrameOutputTimeUs_240fps_outputsExpectedTimes() {
int captureFrameRate = 240;
int inputMaxLayer = 3;
int frameCount = 16;
SlowMotionData.Segment segment1 =
new SlowMotionData.Segment(
/* startTimeMs= */ 50, /* endTimeMs= */ 150, /* speedDivisor= */ 2);
SlowMotionData.Segment segment2 =
new SlowMotionData.Segment(
/* startTimeMs= */ 210, /* endTimeMs= */ 360, /* speedDivisor= */ 8);
Format format =
createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer =
new SefSlowMotionVideoSampleTransformer(format);
List<Long> outputTimesUs =
getOutputTimesUs(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
// Test frame inside segment.
assertThat(outputTimesUs.get(9))
.isEqualTo(Math.round((300.0 + 100 + (300 - 210) * 7) * 1000 * 30 / 240));
// Test frame outside segment.
assertThat(outputTimesUs.get(13))
.isEqualTo(Math.round((433 + 1 / 3.0 + 100 + 150 * 7) * 1000 * 30 / 240));
}
@Test
public void getCurrentFrameOutputTimeUs_120fps_outputsExpectedTimes() {
int captureFrameRate = 120;
int inputMaxLayer = 3;
int frameCount = 16;
SlowMotionData.Segment segment1 =
new SlowMotionData.Segment(
/* startTimeMs= */ 50, /* endTimeMs= */ 150, /* speedDivisor= */ 2);
SlowMotionData.Segment segment2 =
new SlowMotionData.Segment(
/* startTimeMs= */ 210, /* endTimeMs= */ 360, /* speedDivisor= */ 8);
Format format =
createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer =
new SefSlowMotionVideoSampleTransformer(format);
List<Long> outputTimesUs =
getOutputTimesUs(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
// Test frame inside segment.
assertThat(outputTimesUs.get(9))
.isEqualTo(Math.round((300.0 + 100 + (300 - 210) * 7) * 1000 * 30 / 120));
// Test frame outside segment.
assertThat(outputTimesUs.get(13))
.isEqualTo(Math.round((433 + 1 / 3.0 + 100 + 150 * 7) * 1000 * 30 / 120));
}
@Test
public void getCurrentFrameOutputTimeUs_contiguousSegments_outputsExpectedTimes() {
int captureFrameRate = 240;
int inputMaxLayer = 3;
int frameCount = 16;
SlowMotionData.Segment segment1 =
new SlowMotionData.Segment(
/* startTimeMs= */ 50, /* endTimeMs= */ 210, /* speedDivisor= */ 2);
SlowMotionData.Segment segment2 =
new SlowMotionData.Segment(
/* startTimeMs= */ 210, /* endTimeMs= */ 360, /* speedDivisor= */ 8);
Format format =
createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer =
new SefSlowMotionVideoSampleTransformer(format);
List<Long> outputTimesUs =
getOutputTimesUs(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
// Test frame inside second segment.
assertThat(outputTimesUs.get(9)).isEqualTo(136_250);
}
/**
* Creates a {@link SlowMotionData.Segment}.
*
* @param startFrameIndex The index of the first frame in the segment.
* @param endFrameIndex The index of the first frame following the segment.
* @param speedDivisor The factor by which the input is slowed down in the segment.
* @return A {@link SlowMotionData.Segment}.
*/
private static SlowMotionData.Segment createSegment(
int startFrameIndex, int endFrameIndex, int speedDivisor) {
return new SlowMotionData.Segment(
/* startTimeMs= */ (int) (startFrameIndex * C.MILLIS_PER_SECOND / INPUT_FRAME_RATE),
/* endTimeMs= */ (int) (endFrameIndex * C.MILLIS_PER_SECOND / INPUT_FRAME_RATE) - 1,
speedDivisor);
}
/** Creates a {@link Format} for an SEF slow motion video track. */
private static Format createSefSlowMotionFormat(
int captureFrameRate,
int inputMaxLayer,
List<SlowMotionData.Segment> segments) {
SmtaMetadataEntry smtaMetadataEntry =
new SmtaMetadataEntry(captureFrameRate, /* svcTemporalLayerCount= */ inputMaxLayer + 1);
SlowMotionData slowMotionData = new SlowMotionData(segments);
Metadata metadata = new Metadata(smtaMetadataEntry, slowMotionData);
return new Format.Builder()
.setSampleMimeType(MimeTypes.VIDEO_H264)
.setMetadata(metadata)
.build();
}
/**
* Returns a list containing the temporal SVC layers of the frames that should be kept according
* to {@link SefSlowMotionVideoSampleTransformer#processCurrentFrame(int, long)}.
*
* @param sampleTransformer The {@link SefSlowMotionVideoSampleTransformer}.
* @param layerSequence The sequence of layer values in the input.
* @param frameCount The number of video frames in the input.
* @return The output layers.
*/
private static List<Integer> getKeptOutputLayers(
SefSlowMotionVideoSampleTransformer sampleTransformer,
int[] layerSequence,
int frameCount) {
List<Integer> outputLayers = new ArrayList<>();
for (int i = 0; i < frameCount; i++) {
int layer = layerSequence[i % layerSequence.length];
long timeUs = i * C.MICROS_PER_SECOND / INPUT_FRAME_RATE;
if (sampleTransformer.processCurrentFrame(layer, timeUs)) {
outputLayers.add(layer);
}
}
return outputLayers;
}
/**
* Returns a list containing the frame output times obtained using {@link
* SefSlowMotionVideoSampleTransformer#getCurrentFrameOutputTimeUs(long)}.
*
* <p>The output contains the output times for all the input frames, regardless of whether they
* should be kept or not.
*
* @param sampleTransformer The {@link SefSlowMotionVideoSampleTransformer}.
* @param layerSequence The sequence of layer values in the input.
* @param frameCount The number of video frames in the input.
* @return The frame output times, in microseconds.
*/
private static List<Long> getOutputTimesUs(
SefSlowMotionVideoSampleTransformer sampleTransformer,
int[] layerSequence,
int frameCount) {
List<Long> outputTimesUs = new ArrayList<>();
for (int i = 0; i < frameCount; i++) {
int layer = layerSequence[i % layerSequence.length];
long inputTimeUs = i * C.MICROS_PER_SECOND / INPUT_FRAME_RATE;
sampleTransformer.processCurrentFrame(layer, inputTimeUs);
outputTimesUs.add(sampleTransformer.getCurrentFrameOutputTimeUs(inputTimeUs));
}
return outputTimesUs;
}
}

View File

@ -0,0 +1,85 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.mp4.SlowMotionData;
import com.google.android.exoplayer2.metadata.mp4.SlowMotionData.Segment;
import com.google.android.exoplayer2.metadata.mp4.SmtaMetadataEntry;
import com.google.common.collect.ImmutableList;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit tests for {@link SegmentSpeedProvider}. */
@RunWith(AndroidJUnit4.class)
public class SegmentSpeedProviderTest {
private static final SmtaMetadataEntry SMTA_SPEED_8 =
new SmtaMetadataEntry(/* captureFrameRate= */ 240, /* svcTemporalLayerCount= */ 4);
@Test
public void getSpeed_noSegments_returnsBaseSpeed() {
SegmentSpeedProvider provider =
new SegmentSpeedProvider(
new Format.Builder().setMetadata(new Metadata(SMTA_SPEED_8)).build());
assertThat(provider.getSpeed(0)).isEqualTo(8);
assertThat(provider.getSpeed(1_000_000)).isEqualTo(8);
}
@Test
public void getSpeed_returnsCorrectSpeed() {
List<Segment> segments =
ImmutableList.of(
new Segment(/* startTimeMs= */ 500, /* endTimeMs= */ 1000, /* speedDivisor= */ 8),
new Segment(/* startTimeMs= */ 1500, /* endTimeMs= */ 2000, /* speedDivisor= */ 4),
new Segment(/* startTimeMs= */ 2000, /* endTimeMs= */ 2500, /* speedDivisor= */ 2));
SegmentSpeedProvider provider =
new SegmentSpeedProvider(
new Format.Builder()
.setMetadata(new Metadata(new SlowMotionData(segments), SMTA_SPEED_8))
.build());
assertThat(provider.getSpeed(C.msToUs(0))).isEqualTo(8);
assertThat(provider.getSpeed(C.msToUs(500))).isEqualTo(1);
assertThat(provider.getSpeed(C.msToUs(800))).isEqualTo(1);
assertThat(provider.getSpeed(C.msToUs(1000))).isEqualTo(8);
assertThat(provider.getSpeed(C.msToUs(1250))).isEqualTo(8);
assertThat(provider.getSpeed(C.msToUs(1500))).isEqualTo(2);
assertThat(provider.getSpeed(C.msToUs(1650))).isEqualTo(2);
assertThat(provider.getSpeed(C.msToUs(2000))).isEqualTo(4);
assertThat(provider.getSpeed(C.msToUs(2400))).isEqualTo(4);
assertThat(provider.getSpeed(C.msToUs(2500))).isEqualTo(8);
assertThat(provider.getSpeed(C.msToUs(3000))).isEqualTo(8);
}
@Test
public void getSpeed_withNegativeTimestamp_throwsException() {
assertThrows(
IllegalArgumentException.class,
() ->
new SegmentSpeedProvider(
new Format.Builder().setMetadata(new Metadata(SMTA_SPEED_8)).build())
.getSpeed(-1));
}
}

View File

@ -0,0 +1,57 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static org.junit.Assert.assertThrows;
import android.content.Context;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.util.MimeTypes;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit test for {@link Transformer.Builder}. */
@RunWith(AndroidJUnit4.class)
public class TransformerBuilderTest {
@Test
public void setOutputMimeType_unsupportedMimeType_throws() {
assertThrows(
IllegalArgumentException.class,
() -> new Transformer.Builder().setOutputMimeType(MimeTypes.VIDEO_FLV));
}
@Test
public void build_withoutContext_throws() {
assertThrows(IllegalStateException.class, () -> new Transformer.Builder().build());
}
@Test
public void build_removeAudioAndVideo_throws() {
Context context = ApplicationProvider.getApplicationContext();
assertThrows(
IllegalStateException.class,
() ->
new Transformer.Builder()
.setContext(context)
.setRemoveAudio(true)
.setRemoveVideo(true)
.build());
}
}

View File

@ -0,0 +1,515 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;

import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_AVAILABLE;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_NO_TRANSFORMATION;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_UNAVAILABLE;
import static com.google.android.exoplayer2.transformer.Transformer.PROGRESS_STATE_WAITING_FOR_AVAILABILITY;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;

import android.content.Context;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.testutil.AutoAdvancingFakeClock;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.Iterables;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.shadows.ShadowMediaCodec;

/** Unit test for {@link Transformer}. */
@RunWith(AndroidJUnit4.class)
public final class TransformerTest {

private static final String FILE_VIDEO_ONLY = "asset:///media/mkv/sample.mkv";
private static final String FILE_AUDIO_ONLY = "asset:///media/amr/sample_nb.amr";
private static final String FILE_AUDIO_VIDEO = "asset:///media/mp4/sample.mp4";
// The ShadowMediaMuxer only outputs sample data to the output file.
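  // As a result, the expected lengths below are the summed sizes of the samples written by the
  // shadow muxer rather than real container file sizes.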
private static final int FILE_VIDEO_ONLY_SAMPLE_DATA_LENGTH = 89_502;
private static final int FILE_AUDIO_ONLY_SAMPLE_DATA_LENGTH = 2834;
private static final int FILE_AUDIO_VIDEO_AUDIO_SAMPLE_DATA_LENGTH = 9529;
private static final int FILE_AUDIO_VIDEO_VIDEO_SAMPLE_DATA_LENGTH = 89_876;
private Context context;
private String outputPath;
private AutoAdvancingFakeClock clock;
private ProgressHolder progressHolder;
@Before
public void setUp() throws Exception {
context = ApplicationProvider.getApplicationContext();
outputPath = Util.createTempFile(context, "TransformerTest").getPath();
clock = new AutoAdvancingFakeClock();
progressHolder = new ProgressHolder();
createEncodersAndDecoders();
}
@After
public void tearDown() throws Exception {
Files.delete(Paths.get(outputPath));
removeEncodersAndDecoders();
}
@Test
public void startTransformation_videoOnly_completesSuccessfully() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_VIDEO_ONLY);
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(new File(outputPath).length()).isEqualTo(FILE_VIDEO_ONLY_SAMPLE_DATA_LENGTH);
}
@Test
public void startTransformation_audioOnly_completesSuccessfully() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_AUDIO_ONLY);
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(new File(outputPath).length()).isEqualTo(FILE_AUDIO_ONLY_SAMPLE_DATA_LENGTH);
}
@Test
public void startTransformation_audioAndVideo_completesSuccessfully() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_AUDIO_VIDEO);
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(new File(outputPath).length())
.isEqualTo(
FILE_AUDIO_VIDEO_VIDEO_SAMPLE_DATA_LENGTH + FILE_AUDIO_VIDEO_AUDIO_SAMPLE_DATA_LENGTH);
}
@Test
public void startTransformation_withSubtitles_completesSuccessfully() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri("asset:///media/mkv/sample_with_srt.mkv");
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(new File(outputPath).length()).isEqualTo(89_502);
}
@Test
public void startTransformation_successiveTransformations_completesSuccessfully()
throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_VIDEO_ONLY);
// Transform first media item.
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
Files.delete(Paths.get(outputPath));
// Transformer.startTransformation() will create a new SimpleExoPlayer instance. Reset the
// clock's handler so that the clock advances with the new SimpleExoPlayer instance.
clock.resetHandler();
// Transform second media item.
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(new File(outputPath).length()).isEqualTo(FILE_VIDEO_ONLY_SAMPLE_DATA_LENGTH);
}
@Test
public void startTransformation_concurrentTransformations_throwsError() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_VIDEO_ONLY);
transformer.startTransformation(mediaItem, outputPath);
assertThrows(
IllegalStateException.class, () -> transformer.startTransformation(mediaItem, outputPath));
}
@Test
public void startTransformation_removeAudio_completesSuccessfully() throws Exception {
Transformer transformer =
new Transformer.Builder().setContext(context).setRemoveAudio(true).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_AUDIO_VIDEO);
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(new File(outputPath).length()).isEqualTo(FILE_AUDIO_VIDEO_VIDEO_SAMPLE_DATA_LENGTH);
}
@Test
public void startTransformation_removeVideo_completesSuccessfully() throws Exception {
Transformer transformer =
new Transformer.Builder().setContext(context).setRemoveVideo(true).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_AUDIO_VIDEO);
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(new File(outputPath).length()).isEqualTo(FILE_AUDIO_VIDEO_AUDIO_SAMPLE_DATA_LENGTH);
}
@Test
public void startTransformation_flattenForSlowMotion_completesSuccessfully() throws Exception {
Transformer transformer =
new Transformer.Builder()
.setContext(context)
.setFlattenForSlowMotion(true)
.setClock(clock)
.build();
MediaItem mediaItem = MediaItem.fromUri("asset:///media/mp4/sample_sef_slow_motion.mp4");
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(new File(outputPath).length()).isEqualTo(18_172);
}
@Test
public void startTransformation_withPlayerError_completesWithError() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri("asset:///non-existing-path.mp4");
transformer.startTransformation(mediaItem, outputPath);
Exception exception = TransformerTestRunner.runUntilError(transformer);
assertThat(exception).isInstanceOf(ExoPlaybackException.class);
assertThat(exception).hasCauseThat().isInstanceOf(IOException.class);
}
@Test
public void startTransformation_withAllSampleFormatsUnsupported_completesWithError()
throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri("asset:///media/mp4/sample_ac3.mp4");
transformer.startTransformation(mediaItem, outputPath);
Exception exception = TransformerTestRunner.runUntilError(transformer);
assertThat(exception).isInstanceOf(IllegalStateException.class);
}
@Test
public void startTransformation_afterCancellation_completesSuccessfully() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_VIDEO_ONLY);
transformer.startTransformation(mediaItem, outputPath);
transformer.cancel();
Files.delete(Paths.get(outputPath));
// Transformer.startTransformation() will create a new SimpleExoPlayer instance. Reset the
// clock's handler so that the clock advances with the new SimpleExoPlayer instance.
clock.resetHandler();
// This would throw if the previous transformation had not been cancelled.
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(new File(outputPath).length()).isEqualTo(FILE_VIDEO_ONLY_SAMPLE_DATA_LENGTH);
}
@Test
public void startTransformation_fromSpecifiedThread_completesSuccessfully() throws Exception {
HandlerThread anotherThread = new HandlerThread("AnotherThread");
anotherThread.start();
Looper looper = anotherThread.getLooper();
Transformer transformer =
new Transformer.Builder().setContext(context).setLooper(looper).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_AUDIO_ONLY);
AtomicReference<Exception> exception = new AtomicReference<>();
CountDownLatch countDownLatch = new CountDownLatch(1);
new Handler(looper)
.post(
() -> {
try {
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
} catch (Exception e) {
exception.set(e);
} finally {
countDownLatch.countDown();
}
});
countDownLatch.await();
assertThat(exception.get()).isNull();
assertThat(new File(outputPath).length()).isEqualTo(FILE_AUDIO_ONLY_SAMPLE_DATA_LENGTH);
}
@Test
public void startTransformation_fromWrongThread_throwsError() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_AUDIO_ONLY);
HandlerThread anotherThread = new HandlerThread("AnotherThread");
AtomicReference<IllegalStateException> illegalStateException = new AtomicReference<>();
CountDownLatch countDownLatch = new CountDownLatch(1);
anotherThread.start();
new Handler(anotherThread.getLooper())
.post(
() -> {
try {
transformer.startTransformation(mediaItem, outputPath);
} catch (IOException e) {
// Do nothing.
} catch (IllegalStateException e) {
illegalStateException.set(e);
} finally {
countDownLatch.countDown();
}
});
countDownLatch.await();
assertThat(illegalStateException.get()).isNotNull();
}
@Test
public void getProgress_knownDuration_returnsConsistentStates() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_VIDEO_ONLY);
AtomicInteger previousProgressState =
new AtomicInteger(PROGRESS_STATE_WAITING_FOR_AVAILABILITY);
AtomicBoolean foundInconsistentState = new AtomicBoolean();
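    // Poll getProgress() and record an inconsistency if the state ever becomes UNAVAILABLE (the
    // duration is known) or moves backwards, e.g. from AVAILABLE back to
    // WAITING_FOR_AVAILABILITY, or away from NO_TRANSFORMATION.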
Handler progressHandler =
new Handler(Looper.myLooper()) {
@Override
public void handleMessage(Message msg) {
@Transformer.ProgressState int progressState = transformer.getProgress(progressHolder);
if (progressState == PROGRESS_STATE_UNAVAILABLE) {
foundInconsistentState.set(true);
return;
}
switch (previousProgressState.get()) {
case PROGRESS_STATE_WAITING_FOR_AVAILABILITY:
break;
case PROGRESS_STATE_AVAILABLE:
if (progressState == PROGRESS_STATE_WAITING_FOR_AVAILABILITY) {
foundInconsistentState.set(true);
return;
}
break;
case PROGRESS_STATE_NO_TRANSFORMATION:
if (progressState != PROGRESS_STATE_NO_TRANSFORMATION) {
foundInconsistentState.set(true);
return;
}
break;
default:
throw new IllegalStateException();
}
previousProgressState.set(progressState);
sendEmptyMessage(0);
}
};
transformer.startTransformation(mediaItem, outputPath);
progressHandler.sendEmptyMessage(0);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(foundInconsistentState.get()).isFalse();
}
@Test
public void getProgress_knownDuration_givesIncreasingPercentages() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_VIDEO_ONLY);
List<Integer> progresses = new ArrayList<>();
Handler progressHandler =
new Handler(Looper.myLooper()) {
@Override
public void handleMessage(Message msg) {
@Transformer.ProgressState int progressState = transformer.getProgress(progressHolder);
if (progressState == PROGRESS_STATE_NO_TRANSFORMATION) {
return;
}
if (progressState != PROGRESS_STATE_WAITING_FOR_AVAILABILITY
&& (progresses.isEmpty()
|| Iterables.getLast(progresses) != progressHolder.progress)) {
progresses.add(progressHolder.progress);
}
sendEmptyMessage(0);
}
};
transformer.startTransformation(mediaItem, outputPath);
progressHandler.sendEmptyMessage(0);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(progresses).isInOrder();
if (!progresses.isEmpty()) {
// The progress list could be empty if the transformation ends before any progress can be
// retrieved.
assertThat(progresses.get(0)).isAtLeast(0);
assertThat(Iterables.getLast(progresses)).isLessThan(100);
}
}
@Test
public void getProgress_noCurrentTransformation_returnsNoTransformation() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_VIDEO_ONLY);
@Transformer.ProgressState int stateBeforeTransform = transformer.getProgress(progressHolder);
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
@Transformer.ProgressState int stateAfterTransform = transformer.getProgress(progressHolder);
assertThat(stateBeforeTransform).isEqualTo(Transformer.PROGRESS_STATE_NO_TRANSFORMATION);
assertThat(stateAfterTransform).isEqualTo(Transformer.PROGRESS_STATE_NO_TRANSFORMATION);
}
@Test
public void getProgress_unknownDuration_returnsConsistentStates() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri("asset:///media/mp4/sample_fragmented.mp4");
AtomicInteger previousProgressState =
new AtomicInteger(PROGRESS_STATE_WAITING_FOR_AVAILABILITY);
AtomicBoolean foundInconsistentState = new AtomicBoolean();
Handler progressHandler =
new Handler(Looper.myLooper()) {
@Override
public void handleMessage(Message msg) {
@Transformer.ProgressState int progressState = transformer.getProgress(progressHolder);
switch (previousProgressState.get()) {
case PROGRESS_STATE_WAITING_FOR_AVAILABILITY:
break;
case PROGRESS_STATE_UNAVAILABLE:
case PROGRESS_STATE_AVAILABLE: // See [Internal: b/176145097].
if (progressState == PROGRESS_STATE_WAITING_FOR_AVAILABILITY) {
foundInconsistentState.set(true);
return;
}
break;
case PROGRESS_STATE_NO_TRANSFORMATION:
if (progressState != PROGRESS_STATE_NO_TRANSFORMATION) {
foundInconsistentState.set(true);
return;
}
break;
default:
throw new IllegalStateException();
}
previousProgressState.set(progressState);
sendEmptyMessage(0);
}
};
transformer.startTransformation(mediaItem, outputPath);
progressHandler.sendEmptyMessage(0);
TransformerTestRunner.runUntilCompleted(transformer);
assertThat(foundInconsistentState.get()).isFalse();
}
@Test
public void getProgress_fromWrongThread_throwsError() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
HandlerThread anotherThread = new HandlerThread("AnotherThread");
AtomicReference<IllegalStateException> illegalStateException = new AtomicReference<>();
CountDownLatch countDownLatch = new CountDownLatch(1);
anotherThread.start();
new Handler(anotherThread.getLooper())
.post(
() -> {
try {
transformer.getProgress(progressHolder);
} catch (IllegalStateException e) {
illegalStateException.set(e);
} finally {
countDownLatch.countDown();
}
});
countDownLatch.await();
assertThat(illegalStateException.get()).isNotNull();
}
@Test
public void cancel_afterCompletion_doesNotThrow() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
MediaItem mediaItem = MediaItem.fromUri(FILE_VIDEO_ONLY);
transformer.startTransformation(mediaItem, outputPath);
TransformerTestRunner.runUntilCompleted(transformer);
transformer.cancel();
}
@Test
public void cancel_fromWrongThread_throwsError() throws Exception {
Transformer transformer = new Transformer.Builder().setContext(context).setClock(clock).build();
HandlerThread anotherThread = new HandlerThread("AnotherThread");
AtomicReference<IllegalStateException> illegalStateException = new AtomicReference<>();
CountDownLatch countDownLatch = new CountDownLatch(1);
anotherThread.start();
new Handler(anotherThread.getLooper())
.post(
() -> {
try {
transformer.cancel();
} catch (IllegalStateException e) {
illegalStateException.set(e);
} finally {
countDownLatch.countDown();
}
});
countDownLatch.await();
assertThat(illegalStateException.get()).isNotNull();
}
private static void createEncodersAndDecoders() {
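    // Register passthrough shadow codecs that copy each input buffer to the output buffer, so
    // decoding and encoding under Robolectric are deterministic.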
ShadowMediaCodec.CodecConfig codecConfig =
new ShadowMediaCodec.CodecConfig(
/* inputBufferSize= */ 10_000,
/* outputBufferSize= */ 10_000,
/* codec= */ (in, out) -> out.put(in));
ShadowMediaCodec.addDecoder(MimeTypes.AUDIO_AAC, codecConfig);
ShadowMediaCodec.addDecoder(MimeTypes.AUDIO_AMR_NB, codecConfig);
ShadowMediaCodec.addEncoder(MimeTypes.AUDIO_AAC, codecConfig);
}
private static void removeEncodersAndDecoders() {
ShadowMediaCodec.clearCodecs();
}
}
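Taken together, these tests mirror the intended application flow. A rough sketch follows; `inputUri` and `outputPath` are placeholders, and the snippet is a fragment rather than a definitive usage pattern.

// Illustrative sketch of the API surface covered above.
Transformer transformer = new Transformer.Builder().setContext(context).build();
transformer.setListener(
    new Transformer.Listener() {
      @Override
      public void onTransformationCompleted(MediaItem inputMediaItem) {
        // The file at outputPath is ready.
      }

      @Override
      public void onTransformationError(MediaItem inputMediaItem, Exception exception) {
        // Handle the failure.
      }
    });
// startTransformation declares IOException, so the enclosing method must handle or declare it.
transformer.startTransformation(MediaItem.fromUri(inputUri), outputPath);

// Progress can be polled while the transformation runs.
ProgressHolder progressHolder = new ProgressHolder();
@Transformer.ProgressState int progressState = transformer.getProgress(progressHolder);
if (progressState == Transformer.PROGRESS_STATE_AVAILABLE) {
  // progressHolder.progress holds the current percentage.
}

As the wrong-thread tests show, startTransformation, getProgress and cancel must all be called on the Looper the Transformer was built with.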

View File

@ -0,0 +1,93 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;

import static com.google.android.exoplayer2.robolectric.RobolectricUtil.runLooperUntil;

import androidx.annotation.Nullable;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.robolectric.RobolectricUtil;
import java.util.concurrent.TimeoutException;

/** Helper class to run a {@link Transformer} test. */
public final class TransformerTestRunner {

  private TransformerTestRunner() {}

/**
* Runs tasks of the {@link Transformer#getApplicationLooper() transformer Looper} until the
* current {@link Transformer transformation} completes.
*
* @param transformer The {@link Transformer}.
* @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
* exceeded.
* @throws IllegalStateException If the method is not called from the main thread, or if the
* transformation completes with error.
*/
public static void runUntilCompleted(Transformer transformer) throws TimeoutException {
@Nullable Exception exception = runUntilListenerCalled(transformer);
if (exception != null) {
throw new IllegalStateException(exception);
}
}
/**
* Runs tasks of the {@link Transformer#getApplicationLooper() transformer Looper} until a {@link
* Transformer} error occurs.
*
* @param transformer The {@link Transformer}.
* @return The raised exception.
* @throws TimeoutException If the {@link RobolectricUtil#DEFAULT_TIMEOUT_MS default timeout} is
* exceeded.
* @throws IllegalStateException If the method is not called from the main thread, or if the
* transformation completes without error.
*/
public static Exception runUntilError(Transformer transformer) throws TimeoutException {
@Nullable Exception exception = runUntilListenerCalled(transformer);
if (exception == null) {
throw new IllegalStateException("The transformation completed without error.");
}
return exception;
}
@Nullable
private static Exception runUntilListenerCalled(Transformer transformer) throws TimeoutException {
TransformationResult transformationResult = new TransformationResult();
Transformer.Listener listener =
new Transformer.Listener() {
@Override
public void onTransformationCompleted(MediaItem inputMediaItem) {
transformationResult.isCompleted = true;
}
@Override
public void onTransformationError(MediaItem inputMediaItem, Exception exception) {
transformationResult.exception = exception;
}
};
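    // Note that this replaces any listener previously set on the transformer.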
transformer.setListener(listener);
runLooperUntil(
transformer.getApplicationLooper(),
() -> transformationResult.isCompleted || transformationResult.exception != null);
return transformationResult.exception;
}
private static class TransformationResult {
public boolean isCompleted;
@Nullable public Exception exception;
}
}