Merge pull request #7162 from google/dev-v2-r2.11.4

r2.11.4
This commit is contained in:
Oliver Woodman 2020-04-08 22:48:19 +01:00 committed by GitHub
commit 7d3f54a375
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
85 changed files with 2733 additions and 719 deletions

View File

@ -1,5 +1,59 @@
# Release notes #
### 2.11.4 (2020-04-08) ###
* Add `SimpleExoPlayer.setWakeMode` to allow automatic `WifiLock` and `WakeLock`
handling ([#6914](https://github.com/google/ExoPlayer/issues/6914)). To use
this feature, you must add the
[WAKE_LOCK](https://developer.android.com/reference/android/Manifest.permission.html#WAKE_LOCK)
permission to your application's manifest file.
* Text:
* Catch and log exceptions in `TextRenderer` rather than re-throwing. This
allows playback to continue even if subtitle decoding fails
([#6885](https://github.com/google/ExoPlayer/issues/6885)).
* Allow missing hours and milliseconds in SubRip (.srt) timecodes
([#7122](https://github.com/google/ExoPlayer/issues/7122)).
* Audio:
* Enable playback speed adjustment and silence skipping for floating point PCM
audio, via resampling to 16-bit integer PCM. To output the original floating
point audio without adjustment, pass `enableFloatOutput=true` to the
`DefaultAudioSink` constructor
([#7134](https://github.com/google/ExoPlayer/issues/7134)).
* Workaround issue that could cause slower than realtime playback of AAC on
Android 10 ([#6671](https://github.com/google/ExoPlayer/issues/6671)).
* Fix case where another app spuriously holding transient audio focus could
prevent ExoPlayer from acquiring audio focus for an indefinite period of
time ([#7182](https://github.com/google/ExoPlayer/issues/7182)).
* Fix case where the player volume could be permanently ducked if audio focus
was released whilst ducking.
* Fix playback of WAV files with trailing non-media bytes
([#7129](https://github.com/google/ExoPlayer/issues/7129)).
* Fix playback of ADTS files with mid-stream ID3 metadata.
* DRM:
* Fix stuck ad playbacks with DRM protected content
([#7188](https://github.com/google/ExoPlayer/issues/7188)).
* Fix playback of Widevine protected content that only provides V1 PSSH atoms
on API levels 21 and 22.
* Fix playback of PlayReady content on Fire TV Stick (Gen 2).
* DASH:
* Update the manifest URI to avoid repeated HTTP redirects
([#6907](https://github.com/google/ExoPlayer/issues/6907)).
* Parse period `AssetIdentifier` elements.
* HLS: Recognize IMSC subtitles
([#7185](https://github.com/google/ExoPlayer/issues/7185)).
* UI: Add an option to set whether to use the orientation sensor for rotation
in spherical playbacks
([#6761](https://github.com/google/ExoPlayer/issues/6761)).
* Analytics: Fix `PlaybackStatsListener` behavior when not keeping history
([#7160](https://github.com/google/ExoPlayer/issues/7160)).
* FFmpeg extension: Add support for `x86_64` architecture.
* Opus extension: Fix parsing of negative gain values
([#7046](https://github.com/google/ExoPlayer/issues/7046)).
* Cast extension: Upgrade `play-services-cast-framework` dependency to 18.1.0.
This fixes an issue where `RemoteServiceException` was thrown due to
`Context.startForegroundService()` not calling `Service.startForeground()`
([#7191](https://github.com/google/ExoPlayer/issues/7191)).
### 2.11.3 (2020-02-19) ###
* SmoothStreaming: Fix regression that broke playback in 2.11.2

View File

@ -13,8 +13,8 @@
// limitations under the License.
project.ext {
// ExoPlayer version and version code.
releaseVersion = '2.11.3'
releaseVersionCode = 2011003
releaseVersion = '2.11.4'
releaseVersionCode = 2011004
minSdkVersion = 16
appTargetSdkVersion = 29
targetSdkVersion = 28 // TODO: Bump once b/143232359 is resolved

View File

@ -57,8 +57,8 @@ dependencies {
implementation project(modulePrefix + 'library-ui')
implementation project(modulePrefix + 'extension-cast')
implementation 'androidx.appcompat:appcompat:' + androidxAppCompatVersion
implementation 'androidx.recyclerview:recyclerview:1.0.0'
implementation 'com.google.android.material:material:1.0.0'
implementation 'androidx.recyclerview:recyclerview:1.1.0'
implementation 'com.google.android.material:material:1.1.0'
}
apply plugin: 'com.google.android.gms.strict-version-matcher-plugin'

View File

@ -18,6 +18,7 @@
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
<uses-permission android:name="android.permission.FOREGROUND_SERVICE"/>
<uses-sdk/>

View File

@ -28,6 +28,7 @@
<activity
android:name=".MainActivity"
android:configChanges="keyboard|keyboardHidden|orientation|screenSize|screenLayout|smallestScreenSize|uiMode"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>

View File

@ -64,7 +64,7 @@ android {
dependencies {
implementation 'androidx.annotation:annotation:' + androidxAnnotationVersion
implementation 'androidx.appcompat:appcompat:' + androidxAppCompatVersion
implementation 'com.google.android.material:material:1.0.0'
implementation 'com.google.android.material:material:1.1.0'
implementation project(modulePrefix + 'library-core')
implementation project(modulePrefix + 'library-dash')
implementation project(modulePrefix + 'library-hls')

View File

@ -39,6 +39,7 @@ import com.google.android.exoplayer2.PlaybackPreparer;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.RenderersFactory;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.demo.Sample.UriSample;
import com.google.android.exoplayer2.drm.DefaultDrmSessionManager;
import com.google.android.exoplayer2.drm.DrmSessionManager;
@ -380,6 +381,7 @@ public class PlayerActivity extends AppCompatActivity
.setTrackSelector(trackSelector)
.build();
player.addListener(new PlayerEventListener());
player.setAudioAttributes(AudioAttributes.DEFAULT, /* handleAudioFocus= */ true);
player.setPlayWhenReady(startAutoPlay);
player.addAnalyticsListener(new EventLogger(trackSelector));
playerView.setPlayer(player);

View File

@ -17,6 +17,7 @@ package com.google.android.exoplayer2.demo;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.net.Uri;
import android.os.AsyncTask;
@ -34,6 +35,7 @@ import android.widget.ExpandableListView.OnChildClickListener;
import android.widget.ImageButton;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.google.android.exoplayer2.ParserException;
@ -63,6 +65,7 @@ public class SampleChooserActivity extends AppCompatActivity
private static final String TAG = "SampleChooserActivity";
private String[] uris;
private boolean useExtensionRenderers;
private DownloadTracker downloadTracker;
private SampleAdapter sampleAdapter;
@ -81,7 +84,6 @@ public class SampleChooserActivity extends AppCompatActivity
Intent intent = getIntent();
String dataUri = intent.getDataString();
String[] uris;
if (dataUri != null) {
uris = new String[] {dataUri};
} else {
@ -105,8 +107,7 @@ public class SampleChooserActivity extends AppCompatActivity
DemoApplication application = (DemoApplication) getApplication();
useExtensionRenderers = application.useExtensionRenderers();
downloadTracker = application.getDownloadTracker();
SampleListLoader loaderTask = new SampleListLoader();
loaderTask.execute(uris);
loadSample();
// Start the download service if it should be running but it's not currently.
// Starting the service in the foreground causes notification flicker if there is no scheduled
@ -157,6 +158,37 @@ public class SampleChooserActivity extends AppCompatActivity
sampleAdapter.notifyDataSetChanged();
}
// Callback for the storage-permission request issued from loadSample() via
// Util.maybeRequestReadExternalStoragePermission: retry the sample load on grant,
// otherwise surface the load error and close the activity.
@Override
public void onRequestPermissionsResult(
int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if (grantResults.length == 0) {
// Empty results are triggered if a permission is requested while another request was already
// pending and can be safely ignored in this case.
return;
}
if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// Permission granted: restart the sample-list load that was interrupted by the request.
loadSample();
} else {
// Permission denied: without storage access the sample list cannot be loaded, so notify
// the user and finish the activity rather than leaving it in an unusable state.
Toast.makeText(getApplicationContext(), R.string.sample_list_load_error, Toast.LENGTH_LONG)
.show();
finish();
}
}
// Starts loading the sample list from the URIs in {@code uris}, first ensuring read access.
// If any URI requires the READ_EXTERNAL_STORAGE permission, a permission request is issued and
// this method returns early; onRequestPermissionsResult re-invokes it once the user responds.
private void loadSample() {
Assertions.checkNotNull(uris);
for (int i = 0; i < uris.length; i++) {
Uri uri = Uri.parse(uris[i]);
if (Util.maybeRequestReadExternalStoragePermission(this, uri)) {
// A permission dialog was shown; abort and wait for the permission-result callback.
return;
}
}
// All URIs are readable: load the sample groups asynchronously off the main thread.
SampleListLoader loaderTask = new SampleListLoader();
loaderTask.execute(uris);
}
private void onSampleGroups(final List<SampleGroup> groups, boolean sawError) {
if (sawError) {
Toast.makeText(getApplicationContext(), R.string.sample_list_load_error, Toast.LENGTH_LONG)

View File

@ -27,6 +27,7 @@
#endif // CPU_FEATURES_COMPILED_ANY_ARM_NEON
#include <jni.h>
#include <cstdint>
#include <cstring>
#include <mutex> // NOLINT
#include <new>
@ -121,18 +122,22 @@ const char* GetJniErrorMessage(JniStatusCode error_code) {
}
}
// Manages Libgav1FrameBuffer and reference information.
// Manages frame buffer and reference information.
class JniFrameBuffer {
public:
explicit JniFrameBuffer(int id) : id_(id), reference_count_(0) {
gav1_frame_buffer_.private_data = &id_;
}
explicit JniFrameBuffer(int id) : id_(id), reference_count_(0) {}
~JniFrameBuffer() {
for (int plane_index = kPlaneY; plane_index < kMaxPlanes; plane_index++) {
delete[] gav1_frame_buffer_.data[plane_index];
delete[] raw_buffer_[plane_index];
}
}
// Not copyable or movable.
JniFrameBuffer(const JniFrameBuffer&) = delete;
JniFrameBuffer(JniFrameBuffer&&) = delete;
JniFrameBuffer& operator=(const JniFrameBuffer&) = delete;
JniFrameBuffer& operator=(JniFrameBuffer&&) = delete;
void SetFrameData(const libgav1::DecoderBuffer& decoder_buffer) {
for (int plane_index = kPlaneY; plane_index < decoder_buffer.NumPlanes();
plane_index++) {
@ -160,9 +165,8 @@ class JniFrameBuffer {
void RemoveReference() { reference_count_--; }
bool InUse() const { return reference_count_ != 0; }
const Libgav1FrameBuffer& GetGav1FrameBuffer() const {
return gav1_frame_buffer_;
}
uint8_t* RawBuffer(int plane_index) const { return raw_buffer_[plane_index]; }
void* BufferPrivateData() const { return const_cast<int*>(&id_); }
// Attempts to reallocate data planes if the existing ones don't have enough
// capacity. Returns true if the allocation was successful or wasn't needed,
@ -172,15 +176,14 @@ class JniFrameBuffer {
for (int plane_index = kPlaneY; plane_index < kMaxPlanes; plane_index++) {
const int min_size =
(plane_index == kPlaneY) ? y_plane_min_size : uv_plane_min_size;
if (gav1_frame_buffer_.size[plane_index] >= min_size) continue;
delete[] gav1_frame_buffer_.data[plane_index];
gav1_frame_buffer_.data[plane_index] =
new (std::nothrow) uint8_t[min_size];
if (!gav1_frame_buffer_.data[plane_index]) {
gav1_frame_buffer_.size[plane_index] = 0;
if (raw_buffer_size_[plane_index] >= min_size) continue;
delete[] raw_buffer_[plane_index];
raw_buffer_[plane_index] = new (std::nothrow) uint8_t[min_size];
if (!raw_buffer_[plane_index]) {
raw_buffer_size_[plane_index] = 0;
return false;
}
gav1_frame_buffer_.size[plane_index] = min_size;
raw_buffer_size_[plane_index] = min_size;
}
return true;
}
@ -190,9 +193,12 @@ class JniFrameBuffer {
uint8_t* plane_[kMaxPlanes];
int displayed_width_[kMaxPlanes];
int displayed_height_[kMaxPlanes];
int id_;
const int id_;
int reference_count_;
Libgav1FrameBuffer gav1_frame_buffer_ = {};
// Pointers to the raw buffers allocated for the data planes.
uint8_t* raw_buffer_[kMaxPlanes] = {};
// Sizes of the raw buffers in bytes.
size_t raw_buffer_size_[kMaxPlanes] = {};
};
// Manages frame buffers used by libgav1 decoder and ExoPlayer.
@ -210,7 +216,7 @@ class JniBufferManager {
}
JniStatusCode GetBuffer(size_t y_plane_min_size, size_t uv_plane_min_size,
Libgav1FrameBuffer* frame_buffer) {
JniFrameBuffer** jni_buffer) {
std::lock_guard<std::mutex> lock(mutex_);
JniFrameBuffer* output_buffer;
@ -230,7 +236,7 @@ class JniBufferManager {
}
output_buffer->AddReference();
*frame_buffer = output_buffer->GetGav1FrameBuffer();
*jni_buffer = output_buffer;
return kJniStatusOk;
}
@ -316,29 +322,46 @@ struct JniContext {
JniStatusCode jni_status_code = kJniStatusOk;
};
int Libgav1GetFrameBuffer(void* private_data, size_t y_plane_min_size,
size_t uv_plane_min_size,
Libgav1FrameBuffer* frame_buffer) {
JniContext* const context = reinterpret_cast<JniContext*>(private_data);
Libgav1StatusCode Libgav1GetFrameBuffer(void* callback_private_data,
int bitdepth,
libgav1::ImageFormat image_format,
int width, int height, int left_border,
int right_border, int top_border,
int bottom_border, int stride_alignment,
libgav1::FrameBuffer* frame_buffer) {
libgav1::FrameBufferInfo info;
Libgav1StatusCode status = libgav1::ComputeFrameBufferInfo(
bitdepth, image_format, width, height, left_border, right_border,
top_border, bottom_border, stride_alignment, &info);
if (status != kLibgav1StatusOk) return status;
JniContext* const context = static_cast<JniContext*>(callback_private_data);
JniFrameBuffer* jni_buffer;
context->jni_status_code = context->buffer_manager.GetBuffer(
y_plane_min_size, uv_plane_min_size, frame_buffer);
info.y_buffer_size, info.uv_buffer_size, &jni_buffer);
if (context->jni_status_code != kJniStatusOk) {
LOGE("%s", GetJniErrorMessage(context->jni_status_code));
return -1;
}
return 0;
return kLibgav1StatusOutOfMemory;
}
int Libgav1ReleaseFrameBuffer(void* private_data,
Libgav1FrameBuffer* frame_buffer) {
JniContext* const context = reinterpret_cast<JniContext*>(private_data);
const int buffer_id = *reinterpret_cast<int*>(frame_buffer->private_data);
uint8_t* const y_buffer = jni_buffer->RawBuffer(0);
uint8_t* const u_buffer =
(info.uv_buffer_size != 0) ? jni_buffer->RawBuffer(1) : nullptr;
uint8_t* const v_buffer =
(info.uv_buffer_size != 0) ? jni_buffer->RawBuffer(2) : nullptr;
return libgav1::SetFrameBuffer(&info, y_buffer, u_buffer, v_buffer,
jni_buffer->BufferPrivateData(), frame_buffer);
}
void Libgav1ReleaseFrameBuffer(void* callback_private_data,
void* buffer_private_data) {
JniContext* const context = static_cast<JniContext*>(callback_private_data);
const int buffer_id = *static_cast<const int*>(buffer_private_data);
context->jni_status_code = context->buffer_manager.ReleaseBuffer(buffer_id);
if (context->jni_status_code != kJniStatusOk) {
LOGE("%s", GetJniErrorMessage(context->jni_status_code));
return -1;
}
return 0;
}
constexpr int AlignTo16(int value) { return (value + 15) & (~15); }
@ -508,8 +531,8 @@ DECODER_FUNC(jlong, gav1Init, jint threads) {
libgav1::DecoderSettings settings;
settings.threads = threads;
settings.get = Libgav1GetFrameBuffer;
settings.release = Libgav1ReleaseFrameBuffer;
settings.get_frame_buffer = Libgav1GetFrameBuffer;
settings.release_frame_buffer = Libgav1ReleaseFrameBuffer;
settings.callback_private_data = context;
context->libgav1_status_code = context->decoder.Init(&settings);
@ -544,7 +567,8 @@ DECODER_FUNC(jint, gav1Decode, jlong jContext, jobject encodedData,
const uint8_t* const buffer = reinterpret_cast<const uint8_t*>(
env->GetDirectBufferAddress(encodedData));
context->libgav1_status_code =
context->decoder.EnqueueFrame(buffer, length, /*user_private_data=*/0);
context->decoder.EnqueueFrame(buffer, length, /*user_private_data=*/0,
/*buffer_private_data=*/nullptr);
if (context->libgav1_status_code != kLibgav1StatusOk) {
return kStatusError;
}
@ -619,7 +643,7 @@ DECODER_FUNC(jint, gav1GetFrame, jlong jContext, jobject jOutputBuffer,
}
const int buffer_id =
*reinterpret_cast<int*>(decoder_buffer->buffer_private_data);
*static_cast<const int*>(decoder_buffer->buffer_private_data);
context->buffer_manager.AddBufferReference(buffer_id);
JniFrameBuffer* const jni_buffer =
context->buffer_manager.GetBuffer(buffer_id);

View File

@ -31,7 +31,7 @@ android {
}
dependencies {
api 'com.google.android.gms:play-services-cast-framework:17.0.0'
api 'com.google.android.gms:play-services-cast-framework:18.1.0'
implementation 'androidx.annotation:annotation:' + androidxAnnotationVersion
implementation project(modulePrefix + 'library-core')
implementation project(modulePrefix + 'library-ui')

View File

@ -35,22 +35,22 @@ FFMPEG_EXT_PATH="$(pwd)/extensions/ffmpeg/src/main/jni"
NDK_PATH="<path to Android NDK>"
```
* Set up host platform ("darwin-x86_64" for Mac OS X):
* Set the host platform (use "darwin-x86_64" for Mac OS X):
```
HOST_PLATFORM="linux-x86_64"
```
* Configure the formats supported by adapting the following variable if needed
and by setting it. See the [Supported formats][] page for more details of the
formats.
* Configure the decoders to include. See the [Supported formats][] page for
details of the available decoders, and which formats they support.
```
ENABLED_DECODERS=(vorbis opus flac)
```
* Fetch and build FFmpeg. For example, executing script `build_ffmpeg.sh` will
fetch and build FFmpeg release 4.2 for armeabi-v7a, arm64-v8a and x86:
* Fetch and build FFmpeg. Executing `build_ffmpeg.sh` will fetch and build
FFmpeg 4.2 for `armeabi-v7a`, `arm64-v8a`, `x86` and `x86_64`. The script can
be edited if you need to build for different architectures.
```
cd "${FFMPEG_EXT_PATH}" && \
@ -63,7 +63,7 @@ cd "${FFMPEG_EXT_PATH}" && \
```
cd "${FFMPEG_EXT_PATH}" && \
${NDK_PATH}/ndk-build APP_ABI="armeabi-v7a arm64-v8a x86" -j4
${NDK_PATH}/ndk-build APP_ABI="armeabi-v7a arm64-v8a x86 x86_64" -j4
```
## Build instructions (Windows) ##

View File

@ -33,7 +33,7 @@ public final class FfmpegLibrary {
private static final String TAG = "FfmpegLibrary";
private static final LibraryLoader LOADER =
new LibraryLoader("avutil", "avresample", "swresample", "avcodec", "ffmpeg");
new LibraryLoader("avutil", "swresample", "avcodec", "ffmpeg");
private FfmpegLibrary() {}

View File

@ -21,11 +21,6 @@ LOCAL_MODULE := libavcodec
LOCAL_SRC_FILES := ffmpeg/android-libs/$(TARGET_ARCH_ABI)/$(LOCAL_MODULE).so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := libavresample
LOCAL_SRC_FILES := ffmpeg/android-libs/$(TARGET_ARCH_ABI)/$(LOCAL_MODULE).so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := libswresample
LOCAL_SRC_FILES := ffmpeg/android-libs/$(TARGET_ARCH_ABI)/$(LOCAL_MODULE).so
@ -40,6 +35,6 @@ include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := ffmpeg_jni.cc
LOCAL_C_INCLUDES := ffmpeg
LOCAL_SHARED_LIBRARIES := libavcodec libavresample libswresample libavutil
LOCAL_SHARED_LIBRARIES := libavcodec libswresample libavutil
LOCAL_LDLIBS := -Lffmpeg/android-libs/$(TARGET_ARCH_ABI) -llog
include $(BUILD_SHARED_LIBRARY)

View File

@ -32,8 +32,9 @@ COMMON_OPTIONS="
--disable-postproc
--disable-avfilter
--disable-symver
--enable-avresample
--disable-avresample
--enable-swresample
--extra-ldexeflags=-pie
"
TOOLCHAIN_PREFIX="${NDK_PATH}/toolchains/llvm/prebuilt/${HOST_PLATFORM}/bin"
for decoder in "${ENABLED_DECODERS[@]}"
@ -53,7 +54,6 @@ git checkout release/4.2
--strip="${TOOLCHAIN_PREFIX}/arm-linux-androideabi-strip" \
--extra-cflags="-march=armv7-a -mfloat-abi=softfp" \
--extra-ldflags="-Wl,--fix-cortex-a8" \
--extra-ldexeflags=-pie \
${COMMON_OPTIONS}
make -j4
make install-libs
@ -65,7 +65,6 @@ make clean
--cross-prefix="${TOOLCHAIN_PREFIX}/aarch64-linux-android21-" \
--nm="${TOOLCHAIN_PREFIX}/aarch64-linux-android-nm" \
--strip="${TOOLCHAIN_PREFIX}/aarch64-linux-android-strip" \
--extra-ldexeflags=-pie \
${COMMON_OPTIONS}
make -j4
make install-libs
@ -77,7 +76,18 @@ make clean
--cross-prefix="${TOOLCHAIN_PREFIX}/i686-linux-android16-" \
--nm="${TOOLCHAIN_PREFIX}/i686-linux-android-nm" \
--strip="${TOOLCHAIN_PREFIX}/i686-linux-android-strip" \
--extra-ldexeflags=-pie \
--disable-asm \
${COMMON_OPTIONS}
make -j4
make install-libs
make clean
./configure \
--libdir=android-libs/x86_64 \
--arch=x86_64 \
--cpu=x86_64 \
--cross-prefix="${TOOLCHAIN_PREFIX}/x86_64-linux-android21-" \
--nm="${TOOLCHAIN_PREFIX}/x86_64-linux-android-nm" \
--strip="${TOOLCHAIN_PREFIX}/x86_64-linux-android-strip" \
--disable-asm \
${COMMON_OPTIONS}
make -j4

View File

@ -26,10 +26,10 @@ extern "C" {
#include <stdint.h>
#endif
#include <libavcodec/avcodec.h>
#include <libavresample/avresample.h>
#include <libavutil/channel_layout.h>
#include <libavutil/error.h>
#include <libavutil/opt.h>
#include <libswresample/swresample.h>
}
#define LOG_TAG "ffmpeg_jni"
@ -289,11 +289,11 @@ int decodePacket(AVCodecContext *context, AVPacket *packet,
int sampleCount = frame->nb_samples;
int dataSize = av_samples_get_buffer_size(NULL, channelCount, sampleCount,
sampleFormat, 1);
AVAudioResampleContext *resampleContext;
SwrContext *resampleContext;
if (context->opaque) {
resampleContext = (AVAudioResampleContext *) context->opaque;
resampleContext = (SwrContext *)context->opaque;
} else {
resampleContext = avresample_alloc_context();
resampleContext = swr_alloc();
av_opt_set_int(resampleContext, "in_channel_layout", channelLayout, 0);
av_opt_set_int(resampleContext, "out_channel_layout", channelLayout, 0);
av_opt_set_int(resampleContext, "in_sample_rate", sampleRate, 0);
@ -302,9 +302,9 @@ int decodePacket(AVCodecContext *context, AVPacket *packet,
// The output format is always the requested format.
av_opt_set_int(resampleContext, "out_sample_fmt",
context->request_sample_fmt, 0);
result = avresample_open(resampleContext);
result = swr_init(resampleContext);
if (result < 0) {
logError("avresample_open", result);
logError("swr_init", result);
av_frame_free(&frame);
return -1;
}
@ -312,7 +312,7 @@ int decodePacket(AVCodecContext *context, AVPacket *packet,
}
int inSampleSize = av_get_bytes_per_sample(sampleFormat);
int outSampleSize = av_get_bytes_per_sample(context->request_sample_fmt);
int outSamples = avresample_get_out_samples(resampleContext, sampleCount);
int outSamples = swr_get_out_samples(resampleContext, sampleCount);
int bufferOutSize = outSampleSize * channelCount * outSamples;
if (outSize + bufferOutSize > outputSize) {
LOGE("Output buffer size (%d) too small for output data (%d).",
@ -320,15 +320,14 @@ int decodePacket(AVCodecContext *context, AVPacket *packet,
av_frame_free(&frame);
return -1;
}
result = avresample_convert(resampleContext, &outputBuffer, bufferOutSize,
outSamples, frame->data, frame->linesize[0],
sampleCount);
result = swr_convert(resampleContext, &outputBuffer, bufferOutSize,
(const uint8_t **)frame->data, frame->nb_samples);
av_frame_free(&frame);
if (result < 0) {
logError("avresample_convert", result);
logError("swr_convert", result);
return result;
}
int available = avresample_available(resampleContext);
int available = swr_get_out_samples(resampleContext, 0);
if (available != 0) {
LOGE("Expected no samples remaining after resampling, but found %d.",
available);
@ -351,9 +350,9 @@ void releaseContext(AVCodecContext *context) {
if (!context) {
return;
}
AVAudioResampleContext *resampleContext;
if ((resampleContext = (AVAudioResampleContext *) context->opaque)) {
avresample_free(&resampleContext);
SwrContext *swrContext;
if ((swrContext = (SwrContext *)context->opaque)) {
swr_free(&swrContext);
context->opaque = NULL;
}
avcodec_free_context(&context);

View File

@ -223,7 +223,7 @@ public class OkHttpDataSource extends BaseDataSource implements HttpDataSource {
responseByteStream = responseBody.byteStream();
} catch (IOException e) {
throw new HttpDataSourceException(
"Unable to connect to " + dataSpec.uri, e, dataSpec, HttpDataSourceException.TYPE_OPEN);
"Unable to connect", e, dataSpec, HttpDataSourceException.TYPE_OPEN);
}
int responseCode = response.code();

View File

@ -37,7 +37,9 @@ import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public class OpusPlaybackTest {
private static final String BEAR_OPUS_URI = "asset:///bear-opus.webm";
private static final String BEAR_OPUS_URI = "asset:///bear-opus.mka";
private static final String BEAR_OPUS_NEGATIVE_GAIN_URI =
"asset:///bear-opus-negative-gain.mka";
@Before
public void setUp() {
@ -51,6 +53,11 @@ public class OpusPlaybackTest {
playUri(BEAR_OPUS_URI);
}
@Test
public void basicPlaybackNegativeGain() throws Exception {
playUri(BEAR_OPUS_NEGATIVE_GAIN_URI);
}
private void playUri(String uri) throws Exception {
TestPlaybackRunnable testPlaybackRunnable =
new TestPlaybackRunnable(Uri.parse(uri), ApplicationProvider.getApplicationContext());

View File

@ -90,8 +90,8 @@ import java.util.List;
if (channelCount > 8) {
throw new OpusDecoderException("Invalid channel count: " + channelCount);
}
int preskip = readLittleEndian16(headerBytes, 10);
int gain = readLittleEndian16(headerBytes, 16);
int preskip = readUnsignedLittleEndian16(headerBytes, 10);
int gain = readSignedLittleEndian16(headerBytes, 16);
byte[] streamMap = new byte[8];
int numStreams;
@ -228,12 +228,16 @@ import java.util.List;
return (int) (ns * SAMPLE_RATE / 1000000000);
}
private static int readLittleEndian16(byte[] input, int offset) {
/**
 * Reads an unsigned 16-bit little-endian integer from {@code input} starting at {@code offset}.
 *
 * @param input The array to read from.
 * @param offset The index of the first (least significant) byte.
 * @return The value, in the range [0, 65535].
 */
private static int readUnsignedLittleEndian16(byte[] input, int offset) {
// Mask each byte to undo Java's sign extension before combining.
int lowByte = input[offset] & 0xFF;
int highByte = input[offset + 1] & 0xFF;
return (highByte << 8) | lowByte;
}
/**
 * Reads a signed 16-bit little-endian integer from {@code input} starting at {@code offset}.
 *
 * @param input The array to read from.
 * @param offset The index of the first (least significant) byte.
 * @return The value, sign-extended to an int, in the range [-32768, 32767].
 */
private static int readSignedLittleEndian16(byte[] input, int offset) {
// Assemble the unsigned value, then narrow to short so the top bit is treated as the sign.
int unsigned = (input[offset] & 0xFF) | ((input[offset + 1] & 0xFF) << 8);
return (short) unsigned;
}
private native long opusInit(int sampleRate, int channelCount, int numStreams, int numCoupled,
int gain, byte[] streamMap);
private native int opusDecode(long decoder, long timeUs, ByteBuffer inputBuffer, int inputSize,

View File

@ -34,7 +34,7 @@ android {
dependencies {
implementation project(modulePrefix + 'library-core')
implementation 'androidx.work:work-runtime:2.2.0'
implementation 'androidx.work:work-runtime:2.3.4'
}
ext {

View File

@ -29,8 +29,7 @@ class CombinedJavadocPlugin implements Plugin<Project> {
classpath = project.files([])
destinationDir = project.file("$project.buildDir/docs/javadoc")
options {
links "https://docs.oracle.com/javase/7/docs/api/",
"https://developer.android.com/reference"
links "https://developer.android.com/reference"
encoding = "UTF-8"
}
exclude "**/BuildConfig.java"

View File

@ -26,9 +26,7 @@ android.libraryVariants.all { variant ->
title = "ExoPlayer ${javadocTitle}"
source = allSourceDirs
options {
links "http://docs.oracle.com/javase/7/docs/api/"
linksOffline "https://developer.android.com/reference",
"${android.sdkDirectory}/docs/reference"
links "https://developer.android.com/reference"
encoding = "UTF-8"
}
exclude "**/BuildConfig.java"

View File

@ -15,18 +15,11 @@ ext.fixJavadoc = {
def javadocPath = "${project.buildDir}/docs/javadoc"
// Fix external Android links to target the top frame.
def androidRoot = "https://developer.android.com/reference/"
def androidLink = "<a href=\"(${androidRoot}.*?)\\?is-external=true\""
def androidFixed = "<a href=\"\\1\" target=\"_top\""
def androidLink = "<a href=\"(${androidRoot}.*?)\\?is-external=true(.*)\""
def androidFixed = "<a href=\"\\1\\2\" target=\"_top\""
ant.replaceregexp(match:androidLink, replace:androidFixed, flags:'g') {
fileset(dir: "${javadocPath}", includes: "**/*.html")
}
// Fix external Oracle links to use frames and target the top frame.
def oracleRoot = "https://docs.oracle.com/javase/7/docs/api/"
def oracleLink = "<a href=\"(${oracleRoot})(.*?)\\?is-external=true\""
def oracleFixed = "<a href=\"\\1index.html\\?\\2\" target=\"_top\""
ant.replaceregexp(match:oracleLink, replace:oracleFixed, flags:'g') {
fileset(dir: "${javadocPath}", includes: "**/*.html")
}
// Add favicon to each page
def headTag = "<head>"
def headTagWithFavicon = "<head>" +

View File

@ -24,7 +24,6 @@ import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util;
@ -76,15 +75,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({
AUDIO_FOCUS_STATE_LOST_FOCUS,
AUDIO_FOCUS_STATE_NO_FOCUS,
AUDIO_FOCUS_STATE_HAVE_FOCUS,
AUDIO_FOCUS_STATE_LOSS_TRANSIENT,
AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK
})
private @interface AudioFocusState {}
/** No audio focus was held, but has been lost by another app taking it permanently. */
private static final int AUDIO_FOCUS_STATE_LOST_FOCUS = -1;
/** No audio focus is currently being held. */
private static final int AUDIO_FOCUS_STATE_NO_FOCUS = 0;
/** The requested audio focus is currently held. */
@ -101,7 +97,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final AudioManager audioManager;
private final AudioFocusListener focusListener;
private final PlayerControl playerControl;
@Nullable private PlayerControl playerControl;
@Nullable private AudioAttributes audioAttributes;
@AudioFocusState private int audioFocusState;
@ -134,64 +130,45 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* Sets audio attributes that should be used to manage audio focus.
*
* <p>Call {@link #updateAudioFocus(boolean, int)} to update the audio focus based on these
* attributes.
*
* @param audioAttributes The audio attributes or {@code null} if audio focus should not be
* managed automatically.
* @param playWhenReady The current state of {@link ExoPlayer#getPlayWhenReady()}.
* @param playerState The current player state; {@link ExoPlayer#getPlaybackState()}.
* @return A {@link PlayerCommand} to execute on the player.
*/
@PlayerCommand
public int setAudioAttributes(
@Nullable AudioAttributes audioAttributes, boolean playWhenReady, int playerState) {
public void setAudioAttributes(@Nullable AudioAttributes audioAttributes) {
if (!Util.areEqual(this.audioAttributes, audioAttributes)) {
this.audioAttributes = audioAttributes;
focusGain = convertAudioAttributesToFocusGain(audioAttributes);
Assertions.checkArgument(
focusGain == C.AUDIOFOCUS_GAIN || focusGain == C.AUDIOFOCUS_NONE,
"Automatic handling of audio focus is only available for USAGE_MEDIA and USAGE_GAME.");
if (playWhenReady
&& (playerState == Player.STATE_BUFFERING || playerState == Player.STATE_READY)) {
return requestAudioFocus();
}
}
return playerState == Player.STATE_IDLE
? handleIdle(playWhenReady)
: handlePrepare(playWhenReady);
}
/**
* Called by a player as part of {@link ExoPlayer#prepare(MediaSource, boolean, boolean)}.
* Called by the player to abandon or request audio focus based on the desired player state.
*
* @param playWhenReady The current state of {@link ExoPlayer#getPlayWhenReady()}.
* @param playWhenReady The desired value of playWhenReady.
* @param playbackState The desired playback state.
* @return A {@link PlayerCommand} to execute on the player.
*/
@PlayerCommand
public int handlePrepare(boolean playWhenReady) {
public int updateAudioFocus(boolean playWhenReady, @Player.State int playbackState) {
if (shouldAbandonAudioFocus(playbackState)) {
abandonAudioFocus();
return playWhenReady ? PLAYER_COMMAND_PLAY_WHEN_READY : PLAYER_COMMAND_DO_NOT_PLAY;
}
return playWhenReady ? requestAudioFocus() : PLAYER_COMMAND_DO_NOT_PLAY;
}
/**
* Called by the player as part of {@link ExoPlayer#setPlayWhenReady(boolean)}.
*
* @param playWhenReady The desired value of playWhenReady.
* @param playerState The current state of the player.
* @return A {@link PlayerCommand} to execute on the player.
* Called when the manager is no longer required. Audio focus will be released without making any
* calls to the {@link PlayerControl}.
*/
@PlayerCommand
public int handleSetPlayWhenReady(boolean playWhenReady, int playerState) {
if (!playWhenReady) {
public void release() {
playerControl = null;
abandonAudioFocus();
return PLAYER_COMMAND_DO_NOT_PLAY;
}
return playerState == Player.STATE_IDLE ? handleIdle(playWhenReady) : requestAudioFocus();
}
/**
 * Called by the player as part of {@link ExoPlayer#stop(boolean)}. Abandons audio focus
 * unconditionally, regardless of the configured focus gain or the current focus state.
 */
public void handleStop() {
  // forceAbandon bypasses the usual checks so focus is released even in transient-loss states.
  abandonAudioFocus(/* forceAbandon= */ true);
}
// Internal methods.
@ -201,62 +178,35 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return focusListener;
}
@PlayerCommand
private int handleIdle(boolean playWhenReady) {
return playWhenReady ? PLAYER_COMMAND_PLAY_WHEN_READY : PLAYER_COMMAND_DO_NOT_PLAY;
/**
 * Returns whether audio focus should be abandoned for the given playback state.
 *
 * @param playbackState The desired playback state.
 * @return True if focus handling is disabled (focus gain is not {@code AUDIOFOCUS_GAIN}) or the
 *     player is idle.
 */
private boolean shouldAbandonAudioFocus(@Player.State int playbackState) {
  if (focusGain != C.AUDIOFOCUS_GAIN) {
    // Focus handling is not enabled for this audio attribute configuration.
    return true;
  }
  return playbackState == Player.STATE_IDLE;
}
@PlayerCommand
private int requestAudioFocus() {
int focusRequestResult;
if (focusGain == C.AUDIOFOCUS_NONE) {
if (audioFocusState != AUDIO_FOCUS_STATE_NO_FOCUS) {
abandonAudioFocus(/* forceAbandon= */ true);
}
if (audioFocusState == AUDIO_FOCUS_STATE_HAVE_FOCUS) {
return PLAYER_COMMAND_PLAY_WHEN_READY;
}
if (audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) {
if (Util.SDK_INT >= 26) {
focusRequestResult = requestAudioFocusV26();
int requestResult = Util.SDK_INT >= 26 ? requestAudioFocusV26() : requestAudioFocusDefault();
if (requestResult == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
setAudioFocusState(AUDIO_FOCUS_STATE_HAVE_FOCUS);
return PLAYER_COMMAND_PLAY_WHEN_READY;
} else {
focusRequestResult = requestAudioFocusDefault();
}
audioFocusState =
focusRequestResult == AudioManager.AUDIOFOCUS_REQUEST_GRANTED
? AUDIO_FOCUS_STATE_HAVE_FOCUS
: AUDIO_FOCUS_STATE_NO_FOCUS;
}
if (audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) {
setAudioFocusState(AUDIO_FOCUS_STATE_NO_FOCUS);
return PLAYER_COMMAND_DO_NOT_PLAY;
}
return audioFocusState == AUDIO_FOCUS_STATE_LOSS_TRANSIENT
? PLAYER_COMMAND_WAIT_FOR_CALLBACK
: PLAYER_COMMAND_PLAY_WHEN_READY;
}
private void abandonAudioFocus() {
abandonAudioFocus(/* forceAbandon= */ false);
}
private void abandonAudioFocus(boolean forceAbandon) {
if (focusGain == C.AUDIOFOCUS_NONE && audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) {
if (audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) {
return;
}
if (focusGain != C.AUDIOFOCUS_GAIN
|| audioFocusState == AUDIO_FOCUS_STATE_LOST_FOCUS
|| forceAbandon) {
if (Util.SDK_INT >= 26) {
abandonAudioFocusV26();
} else {
abandonAudioFocusDefault();
}
audioFocusState = AUDIO_FOCUS_STATE_NO_FOCUS;
}
setAudioFocusState(AUDIO_FOCUS_STATE_NO_FOCUS);
}
private int requestAudioFocusDefault() {
@ -312,7 +262,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
*/
@C.AudioFocusGain
private static int convertAudioAttributesToFocusGain(@Nullable AudioAttributes audioAttributes) {
if (audioAttributes == null) {
// Don't handle audio focus. The content may be video-only, or the app may want
// finer-grained control (e.g. by adding its own audio focus listener).
@ -382,63 +331,55 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
}
}
private void handleAudioFocusChange(int focusChange) {
// Convert the platform focus change to internal state.
switch (focusChange) {
case AudioManager.AUDIOFOCUS_LOSS:
audioFocusState = AUDIO_FOCUS_STATE_LOST_FOCUS;
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
audioFocusState = AUDIO_FOCUS_STATE_LOSS_TRANSIENT;
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
if (willPauseWhenDucked()) {
audioFocusState = AUDIO_FOCUS_STATE_LOSS_TRANSIENT;
} else {
audioFocusState = AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK;
}
break;
case AudioManager.AUDIOFOCUS_GAIN:
audioFocusState = AUDIO_FOCUS_STATE_HAVE_FOCUS;
break;
default:
Log.w(TAG, "Unknown focus change type: " + focusChange);
// Early return.
private void setAudioFocusState(@AudioFocusState int audioFocusState) {
if (this.audioFocusState == audioFocusState) {
return;
}
// Handle the internal state (change).
switch (audioFocusState) {
case AUDIO_FOCUS_STATE_NO_FOCUS:
// Focus was not requested; nothing to do.
break;
case AUDIO_FOCUS_STATE_LOST_FOCUS:
playerControl.executePlayerCommand(PLAYER_COMMAND_DO_NOT_PLAY);
abandonAudioFocus(/* forceAbandon= */ true);
break;
case AUDIO_FOCUS_STATE_LOSS_TRANSIENT:
playerControl.executePlayerCommand(PLAYER_COMMAND_WAIT_FOR_CALLBACK);
break;
case AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK:
// Volume will be adjusted by the code below.
break;
case AUDIO_FOCUS_STATE_HAVE_FOCUS:
playerControl.executePlayerCommand(PLAYER_COMMAND_PLAY_WHEN_READY);
break;
default:
throw new IllegalStateException("Unknown audio focus state: " + audioFocusState);
}
this.audioFocusState = audioFocusState;
float volumeMultiplier =
(audioFocusState == AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK)
? AudioFocusManager.VOLUME_MULTIPLIER_DUCK
: AudioFocusManager.VOLUME_MULTIPLIER_DEFAULT;
if (AudioFocusManager.this.volumeMultiplier != volumeMultiplier) {
AudioFocusManager.this.volumeMultiplier = volumeMultiplier;
if (this.volumeMultiplier == volumeMultiplier) {
return;
}
this.volumeMultiplier = volumeMultiplier;
if (playerControl != null) {
playerControl.setVolumeMultiplier(volumeMultiplier);
}
}
/**
 * Maps a platform audio focus change to the internal focus state and issues the corresponding
 * player command.
 *
 * @param focusChange One of the {@link AudioManager} {@code AUDIOFOCUS_*} change values.
 */
private void handlePlatformAudioFocusChange(int focusChange) {
  if (focusChange == AudioManager.AUDIOFOCUS_GAIN) {
    setAudioFocusState(AUDIO_FOCUS_STATE_HAVE_FOCUS);
    executePlayerCommand(PLAYER_COMMAND_PLAY_WHEN_READY);
  } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS) {
    // Permanent loss: stop playback and give the focus back.
    executePlayerCommand(PLAYER_COMMAND_DO_NOT_PLAY);
    abandonAudioFocus();
  } else if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT
      || focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) {
    // A transient loss pauses; a duckable loss only pauses if the player opts to pause on duck.
    boolean shouldPause =
        focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT || willPauseWhenDucked();
    if (shouldPause) {
      executePlayerCommand(PLAYER_COMMAND_WAIT_FOR_CALLBACK);
      setAudioFocusState(AUDIO_FOCUS_STATE_LOSS_TRANSIENT);
    } else {
      setAudioFocusState(AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK);
    }
  } else {
    Log.w(TAG, "Unknown focus change type: " + focusChange);
  }
}
/**
 * Forwards {@code playerCommand} to the {@link PlayerControl}, if one is still attached.
 *
 * @param playerCommand The {@link PlayerCommand} to execute.
 */
private void executePlayerCommand(@PlayerCommand int playerCommand) {
  // playerControl is cleared in release(); commands after that point are no-ops.
  if (playerControl == null) {
    return;
  }
  playerControl.executePlayerCommand(playerCommand);
}
// Internal audio focus listener.
private class AudioFocusListener implements AudioManager.OnAudioFocusChangeListener {
@ -450,7 +391,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override
public void onAudioFocusChange(int focusChange) {
eventHandler.post(() -> handleAudioFocusChange(focusChange));
eventHandler.post(() -> handlePlatformAudioFocusChange(focusChange));
}
}
}

View File

@ -1019,6 +1019,37 @@ public final class C {
/** Network type for other connections which are not Wifi or cellular (e.g. VPN, Bluetooth). */
public static final int NETWORK_TYPE_OTHER = 8;
/**
 * Mode specifying whether the player should hold a WakeLock and a WifiLock. One of {@link
 * #WAKE_MODE_NONE}, {@link #WAKE_MODE_LOCAL} and {@link #WAKE_MODE_NETWORK}.
 */
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({WAKE_MODE_NONE, WAKE_MODE_LOCAL, WAKE_MODE_NETWORK})
public @interface WakeMode {}
/**
 * A wake mode that will not cause the player to hold any locks.
 *
 * <p>This is suitable for applications that do not play media with the screen off.
 */
public static final int WAKE_MODE_NONE = 0;
/**
 * A wake mode that will cause the player to hold a {@link android.os.PowerManager.WakeLock}
 * during playback.
 *
 * <p>This is suitable for applications that play media with the screen off and do not load media
 * over wifi. Holding the lock requires the {@link android.Manifest.permission#WAKE_LOCK}
 * permission.
 */
public static final int WAKE_MODE_LOCAL = 1;
/**
 * A wake mode that will cause the player to hold a {@link android.os.PowerManager.WakeLock} and a
 * {@link android.net.wifi.WifiManager.WifiLock} during playback.
 *
 * <p>This is suitable for applications that play media with the screen off and may load media
 * over wifi. Holding the locks requires the {@link android.Manifest.permission#WAKE_LOCK}
 * permission.
 */
public static final int WAKE_MODE_NETWORK = 2;
/**
* Track role flags. Possible flag values are {@link #ROLE_FLAG_MAIN}, {@link
* #ROLE_FLAG_ALTERNATE}, {@link #ROLE_FLAG_SUPPLEMENTARY}, {@link #ROLE_FLAG_COMMENTARY}, {@link

View File

@ -386,7 +386,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
playbackInfo = playbackInfo.copyWithPlaybackError(e);
maybeNotifyPlaybackInfoChanged();
} catch (IOException e) {
Log.e(TAG, "Source error.", e);
Log.e(TAG, "Source error", e);
stopInternal(
/* forceResetRenderers= */ false,
/* resetPositionAndState= */ false,
@ -394,7 +394,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
playbackInfo = playbackInfo.copyWithPlaybackError(ExoPlaybackException.createForSource(e));
maybeNotifyPlaybackInfoChanged();
} catch (RuntimeException | OutOfMemoryError e) {
Log.e(TAG, "Internal runtime error.", e);
Log.e(TAG, "Internal runtime error", e);
ExoPlaybackException error =
e instanceof OutOfMemoryError
? ExoPlaybackException.createForOutOfMemoryError((OutOfMemoryError) e)

View File

@ -29,11 +29,11 @@ public final class ExoPlayerLibraryInfo {
/** The version of the library expressed as a string, for example "1.2.3". */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa.
public static final String VERSION = "2.11.3";
public static final String VERSION = "2.11.4";
/** The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}. */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
public static final String VERSION_SLASHY = "ExoPlayerLib/2.11.3";
public static final String VERSION_SLASHY = "ExoPlayerLib/2.11.4";
/**
* The version of the library expressed as an integer, for example 1002003.
@ -43,7 +43,7 @@ public final class ExoPlayerLibraryInfo {
* integer version 123045006 (123-045-006).
*/
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
public static final int VERSION_INT = 2011003;
public static final int VERSION_INT = 2011004;
/**
* Whether the library was compiled with {@link com.google.android.exoplayer2.util.Assertions}

View File

@ -325,6 +325,7 @@ public class SimpleExoPlayer extends BasePlayer
private final AudioBecomingNoisyManager audioBecomingNoisyManager;
private final AudioFocusManager audioFocusManager;
private final WakeLockManager wakeLockManager;
private final WifiLockManager wifiLockManager;
@Nullable private Format videoFormat;
@Nullable private Format audioFormat;
@ -445,8 +446,8 @@ public class SimpleExoPlayer extends BasePlayer
player =
new ExoPlayerImpl(renderers, trackSelector, loadControl, bandwidthMeter, clock, looper);
analyticsCollector.setPlayer(player);
addListener(analyticsCollector);
addListener(componentListener);
player.addListener(analyticsCollector);
player.addListener(componentListener);
videoDebugListeners.add(analyticsCollector);
videoListeners.add(analyticsCollector);
audioDebugListeners.add(analyticsCollector);
@ -460,6 +461,7 @@ public class SimpleExoPlayer extends BasePlayer
new AudioBecomingNoisyManager(context, eventHandler, componentListener);
audioFocusManager = new AudioFocusManager(context, eventHandler, componentListener);
wakeLockManager = new WakeLockManager(context);
wifiLockManager = new WifiLockManager(context);
}
@Override
@ -684,11 +686,11 @@ public class SimpleExoPlayer extends BasePlayer
}
}
audioFocusManager.setAudioAttributes(handleAudioFocus ? audioAttributes : null);
boolean playWhenReady = getPlayWhenReady();
@AudioFocusManager.PlayerCommand
int playerCommand =
audioFocusManager.setAudioAttributes(
handleAudioFocus ? audioAttributes : null, getPlayWhenReady(), getPlaybackState());
updatePlayWhenReady(getPlayWhenReady(), playerCommand);
int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, getPlaybackState());
updatePlayWhenReady(playWhenReady, playerCommand);
}
@Override
@ -1187,9 +1189,10 @@ public class SimpleExoPlayer extends BasePlayer
}
this.mediaSource = mediaSource;
mediaSource.addEventListener(eventHandler, analyticsCollector);
boolean playWhenReady = getPlayWhenReady();
@AudioFocusManager.PlayerCommand
int playerCommand = audioFocusManager.handlePrepare(getPlayWhenReady());
updatePlayWhenReady(getPlayWhenReady(), playerCommand);
int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, Player.STATE_BUFFERING);
updatePlayWhenReady(playWhenReady, playerCommand);
player.prepare(mediaSource, resetPosition, resetState);
}
@ -1197,7 +1200,7 @@ public class SimpleExoPlayer extends BasePlayer
public void setPlayWhenReady(boolean playWhenReady) {
verifyApplicationThread();
@AudioFocusManager.PlayerCommand
int playerCommand = audioFocusManager.handleSetPlayWhenReady(playWhenReady, getPlaybackState());
int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, getPlaybackState());
updatePlayWhenReady(playWhenReady, playerCommand);
}
@ -1276,6 +1279,7 @@ public class SimpleExoPlayer extends BasePlayer
@Override
public void stop(boolean reset) {
verifyApplicationThread();
audioFocusManager.updateAudioFocus(getPlayWhenReady(), Player.STATE_IDLE);
player.stop(reset);
if (mediaSource != null) {
mediaSource.removeEventListener(analyticsCollector);
@ -1284,7 +1288,6 @@ public class SimpleExoPlayer extends BasePlayer
mediaSource = null;
}
}
audioFocusManager.handleStop();
currentCues = Collections.emptyList();
}
@ -1292,8 +1295,9 @@ public class SimpleExoPlayer extends BasePlayer
public void release() {
verifyApplicationThread();
audioBecomingNoisyManager.setEnabled(false);
audioFocusManager.handleStop();
wakeLockManager.setStayAwake(false);
wifiLockManager.setStayAwake(false);
audioFocusManager.release();
player.release();
removeSurfaceCallbacks();
if (surface != null) {
@ -1432,9 +1436,45 @@ public class SimpleExoPlayer extends BasePlayer
*
* @param handleWakeLock Whether the player should use a {@link android.os.PowerManager.WakeLock}
* to ensure the device stays awake for playback, even when the screen is off.
* @deprecated Use {@link #setWakeMode(int)} instead.
*/
@Deprecated
public void setHandleWakeLock(boolean handleWakeLock) {
wakeLockManager.setEnabled(handleWakeLock);
setWakeMode(handleWakeLock ? C.WAKE_MODE_LOCAL : C.WAKE_MODE_NONE);
}
/**
* Sets how the player should keep the device awake for playback when the screen is off.
*
* <p>Enabling this feature requires the {@link android.Manifest.permission#WAKE_LOCK} permission.
* It should be used together with a foreground {@link android.app.Service} for use cases where
* playback occurs and the screen is off (e.g. background audio playback). It is not useful when
* the screen will be kept on during playback (e.g. foreground video playback).
*
* <p>When enabled, the locks ({@link android.os.PowerManager.WakeLock} / {@link
* android.net.wifi.WifiManager.WifiLock}) will be held whenever the player is in the {@link
* #STATE_READY} or {@link #STATE_BUFFERING} states with {@code playWhenReady = true}. The locks
* held depends on the specified {@link C.WakeMode}.
*
* @param wakeMode The {@link C.WakeMode} option to keep the device awake during playback.
*/
public void setWakeMode(@C.WakeMode int wakeMode) {
  // Unknown values are ignored, leaving the current lock configuration unchanged.
  if (wakeMode != C.WAKE_MODE_NONE
      && wakeMode != C.WAKE_MODE_LOCAL
      && wakeMode != C.WAKE_MODE_NETWORK) {
    return;
  }
  // A wake lock is used for both LOCAL and NETWORK; a wifi lock only for NETWORK.
  wakeLockManager.setEnabled(wakeMode != C.WAKE_MODE_NONE);
  wifiLockManager.setEnabled(wakeMode == C.WAKE_MODE_NETWORK);
}
// Internal methods.
@ -1537,6 +1577,24 @@ public class SimpleExoPlayer extends BasePlayer
}
}
/**
 * Derives whether the device should be kept awake from the current playback state and
 * playWhenReady, and applies it to both the wake lock and the wifi lock managers.
 */
private void updateWakeAndWifiLock() {
  @State int playbackState = getPlaybackState();
  boolean stayAwake;
  if (playbackState == Player.STATE_READY || playbackState == Player.STATE_BUFFERING) {
    // Stay awake only while the player intends to play.
    stayAwake = getPlayWhenReady();
  } else if (playbackState == Player.STATE_ENDED || playbackState == Player.STATE_IDLE) {
    stayAwake = false;
  } else {
    throw new IllegalStateException();
  }
  wakeLockManager.setStayAwake(stayAwake);
  wifiLockManager.setStayAwake(stayAwake);
}
private final class ComponentListener
implements VideoRendererEventListener,
AudioRendererEventListener,
@ -1781,16 +1839,7 @@ public class SimpleExoPlayer extends BasePlayer
@Override
public void onPlayerStateChanged(boolean playWhenReady, @State int playbackState) {
switch (playbackState) {
case Player.STATE_READY:
case Player.STATE_BUFFERING:
wakeLockManager.setStayAwake(playWhenReady);
break;
case Player.STATE_ENDED:
case Player.STATE_IDLE:
wakeLockManager.setStayAwake(false);
break;
}
updateWakeAndWifiLock();
}
}
}

View File

@ -39,7 +39,8 @@ import com.google.android.exoplayer2.util.Log;
private boolean stayAwake;
public WakeLockManager(Context context) {
powerManager = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
powerManager =
(PowerManager) context.getApplicationContext().getSystemService(Context.POWER_SERVICE);
}
/**
@ -48,18 +49,19 @@ import com.google.android.exoplayer2.util.Log;
* <p>By default, wake lock handling is not enabled. Enabling this will acquire the wake lock if
* necessary. Disabling this will release the wake lock if it is held.
*
* @param enabled True if the player should handle a {@link WakeLock}, false otherwise. Please
* note that enabling this requires the {@link android.Manifest.permission#WAKE_LOCK}
* permission.
* <p>Enabling the {@link WakeLock} requires the {@link android.Manifest.permission#WAKE_LOCK}
* permission.
*
* @param enabled True if the player should handle a {@link WakeLock}, false otherwise.
*/
public void setEnabled(boolean enabled) {
if (enabled) {
if (wakeLock == null) {
if (powerManager == null) {
Log.w(TAG, "PowerManager was null, therefore the WakeLock was not created.");
Log.w(TAG, "PowerManager is null, therefore not creating the WakeLock.");
return;
}
wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, WAKE_LOCK_TAG);
wakeLock.setReferenceCounted(false);
}
}
@ -86,17 +88,14 @@ import com.google.android.exoplayer2.util.Log;
// reasonable timeout that would not affect the user.
@SuppressLint("WakelockTimeout")
private void updateWakeLock() {
// Needed for the library nullness check. If enabled is true, the wakelock will not be null.
if (wakeLock != null) {
if (enabled) {
if (stayAwake && !wakeLock.isHeld()) {
if (wakeLock == null) {
return;
}
if (enabled && stayAwake) {
wakeLock.acquire();
} else if (!stayAwake && wakeLock.isHeld()) {
wakeLock.release();
}
} else if (wakeLock.isHeld()) {
} else {
wakeLock.release();
}
}
}
}

View File

@ -0,0 +1,94 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import android.content.Context;
import android.net.wifi.WifiManager;
import android.net.wifi.WifiManager.WifiLock;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.util.Log;
/**
* Handles a {@link WifiLock}.
*
* <p>The handling of wifi locks requires the {@link android.Manifest.permission#WAKE_LOCK}
* permission.
*/
/* package */ final class WifiLockManager {

  private static final String TAG = "WifiLockManager";
  private static final String WIFI_LOCK_TAG = "ExoPlayer:WifiLockManager";

  @Nullable private final WifiManager wifiManager;
  @Nullable private WifiLock wifiLock;
  private boolean enabled;
  private boolean stayAwake;

  public WifiLockManager(Context context) {
    // Use the application context to avoid leaking the passed-in context.
    wifiManager =
        (WifiManager) context.getApplicationContext().getSystemService(Context.WIFI_SERVICE);
  }

  /**
   * Sets whether to enable the usage of a {@link WifiLock}.
   *
   * <p>By default, wifi lock handling is not enabled. Enabling will acquire the wifi lock if
   * necessary. Disabling will release the wifi lock if held.
   *
   * <p>Enabling a {@link WifiLock} requires the {@link android.Manifest.permission#WAKE_LOCK}
   * permission.
   *
   * @param enabled True if the player should handle a {@link WifiLock}.
   */
  public void setEnabled(boolean enabled) {
    if (enabled && !maybeCreateWifiLock()) {
      // No WifiManager available; leave the current state untouched.
      return;
    }
    this.enabled = enabled;
    updateWifiLock();
  }

  /**
   * Sets whether to acquire or release the {@link WifiLock}.
   *
   * <p>The wifi lock will not be acquired unless handling has been enabled through {@link
   * #setEnabled(boolean)}.
   *
   * @param stayAwake True if the player should acquire the {@link WifiLock}. False if it should
   *     release.
   */
  public void setStayAwake(boolean stayAwake) {
    this.stayAwake = stayAwake;
    updateWifiLock();
  }

  /** Lazily creates the wifi lock. Returns false if no {@link WifiManager} is available. */
  private boolean maybeCreateWifiLock() {
    if (wifiLock != null) {
      return true;
    }
    if (wifiManager == null) {
      Log.w(TAG, "WifiManager is null, therefore not creating the WifiLock.");
      return false;
    }
    wifiLock = wifiManager.createWifiLock(WifiManager.WIFI_MODE_FULL_HIGH_PERF, WIFI_LOCK_TAG);
    // Reference counting is disabled so repeated acquire/release calls stay balanced.
    wifiLock.setReferenceCounted(false);
    return true;
  }

  private void updateWifiLock() {
    if (wifiLock == null) {
      return;
    }
    boolean shouldHold = enabled && stayAwake;
    if (shouldHold) {
      wifiLock.acquire();
    } else {
      wifiLock.release();
    }
  }
}

View File

@ -50,7 +50,7 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
* <p>For accurate measurements, the listener should be added to the player before loading media,
* i.e., {@link Player#getPlaybackState()} should be {@link Player#STATE_IDLE}.
*
* <p>Playback stats are gathered separately for all playback session, i.e. each window in the
* <p>Playback stats are gathered separately for each playback session, i.e. each window in the
* {@link Timeline} and each single ad.
*/
public final class PlaybackStatsListener
@ -931,6 +931,9 @@ public final class PlaybackStatsListener
}
private void maybeUpdateMediaTimeHistory(long realtimeMs, long mediaTimeMs) {
if (!keepHistory) {
return;
}
if (currentPlaybackState != PlaybackStats.PLAYBACK_STATE_PLAYING) {
if (mediaTimeMs == C.TIME_UNSET) {
return;

View File

@ -238,7 +238,7 @@ public final class DefaultAudioSink implements AudioSink {
@Nullable private final AudioCapabilities audioCapabilities;
private final AudioProcessorChain audioProcessorChain;
private final boolean enableConvertHighResIntPcmToFloat;
private final boolean enableFloatOutput;
private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
private final TrimmingAudioProcessor trimmingAudioProcessor;
private final AudioProcessor[] toIntPcmAvailableAudioProcessors;
@ -299,7 +299,7 @@ public final class DefaultAudioSink implements AudioSink {
*/
public DefaultAudioSink(
@Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors) {
this(audioCapabilities, audioProcessors, /* enableConvertHighResIntPcmToFloat= */ false);
this(audioCapabilities, audioProcessors, /* enableFloatOutput= */ false);
}
/**
@ -309,19 +309,16 @@ public final class DefaultAudioSink implements AudioSink {
* default capabilities (no encoded audio passthrough support) should be assumed.
* @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
* output. May be empty.
* @param enableConvertHighResIntPcmToFloat Whether to enable conversion of high resolution
* integer PCM to 32-bit float for output, if possible. Functionality that uses 16-bit integer
* audio processing (for example, speed and pitch adjustment) will not be available when float
* output is in use.
* @param enableFloatOutput Whether to enable 32-bit float output. Where possible, 32-bit float
* output will be used if the input is 32-bit float, and also if the input is high resolution
* (24-bit or 32-bit) integer PCM. Audio processing (for example, speed adjustment) will not
* be available when float output is in use.
*/
public DefaultAudioSink(
@Nullable AudioCapabilities audioCapabilities,
AudioProcessor[] audioProcessors,
boolean enableConvertHighResIntPcmToFloat) {
this(
audioCapabilities,
new DefaultAudioProcessorChain(audioProcessors),
enableConvertHighResIntPcmToFloat);
boolean enableFloatOutput) {
this(audioCapabilities, new DefaultAudioProcessorChain(audioProcessors), enableFloatOutput);
}
/**
@ -332,18 +329,18 @@ public final class DefaultAudioSink implements AudioSink {
* default capabilities (no encoded audio passthrough support) should be assumed.
* @param audioProcessorChain An {@link AudioProcessorChain} which is used to apply playback
* parameters adjustments. The instance passed in must not be reused in other sinks.
* @param enableConvertHighResIntPcmToFloat Whether to enable conversion of high resolution
* integer PCM to 32-bit float for output, if possible. Functionality that uses 16-bit integer
* audio processing (for example, speed and pitch adjustment) will not be available when float
* output is in use.
* @param enableFloatOutput Whether to enable 32-bit float output. Where possible, 32-bit float
* output will be used if the input is 32-bit float, and also if the input is high resolution
* (24-bit or 32-bit) integer PCM. Audio processing (for example, speed adjustment) will not
* be available when float output is in use.
*/
public DefaultAudioSink(
@Nullable AudioCapabilities audioCapabilities,
AudioProcessorChain audioProcessorChain,
boolean enableConvertHighResIntPcmToFloat) {
boolean enableFloatOutput) {
this.audioCapabilities = audioCapabilities;
this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain);
this.enableConvertHighResIntPcmToFloat = enableConvertHighResIntPcmToFloat;
this.enableFloatOutput = enableFloatOutput;
releasingConditionVariable = new ConditionVariable(true);
audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
@ -422,37 +419,34 @@ public final class DefaultAudioSink implements AudioSink {
}
boolean isInputPcm = Util.isEncodingLinearPcm(inputEncoding);
boolean processingEnabled = isInputPcm && inputEncoding != C.ENCODING_PCM_FLOAT;
boolean processingEnabled = isInputPcm;
int sampleRate = inputSampleRate;
int channelCount = inputChannelCount;
@C.Encoding int encoding = inputEncoding;
boolean shouldConvertHighResIntPcmToFloat =
enableConvertHighResIntPcmToFloat
boolean useFloatOutput =
enableFloatOutput
&& supportsOutput(inputChannelCount, C.ENCODING_PCM_FLOAT)
&& Util.isEncodingHighResolutionIntegerPcm(inputEncoding);
&& Util.isEncodingHighResolutionPcm(inputEncoding);
AudioProcessor[] availableAudioProcessors =
shouldConvertHighResIntPcmToFloat
? toFloatPcmAvailableAudioProcessors
: toIntPcmAvailableAudioProcessors;
useFloatOutput ? toFloatPcmAvailableAudioProcessors : toIntPcmAvailableAudioProcessors;
if (processingEnabled) {
trimmingAudioProcessor.setTrimFrameCount(trimStartFrames, trimEndFrames);
channelMappingAudioProcessor.setChannelMap(outputChannels);
AudioProcessor.AudioFormat inputAudioFormat =
AudioProcessor.AudioFormat outputFormat =
new AudioProcessor.AudioFormat(sampleRate, channelCount, encoding);
AudioProcessor.AudioFormat outputAudioFormat = inputAudioFormat;
for (AudioProcessor audioProcessor : availableAudioProcessors) {
try {
outputAudioFormat = audioProcessor.configure(inputAudioFormat);
AudioProcessor.AudioFormat nextFormat = audioProcessor.configure(outputFormat);
if (audioProcessor.isActive()) {
outputFormat = nextFormat;
}
} catch (UnhandledAudioFormatException e) {
throw new ConfigurationException(e);
}
if (audioProcessor.isActive()) {
inputAudioFormat = outputAudioFormat;
}
}
sampleRate = outputAudioFormat.sampleRate;
channelCount = outputAudioFormat.channelCount;
encoding = outputAudioFormat.encoding;
sampleRate = outputFormat.sampleRate;
channelCount = outputFormat.channelCount;
encoding = outputFormat.encoding;
}
int outputChannelConfig = getChannelConfig(channelCount, isInputPcm);
@ -464,7 +458,7 @@ public final class DefaultAudioSink implements AudioSink {
isInputPcm ? Util.getPcmFrameSize(inputEncoding, inputChannelCount) : C.LENGTH_UNSET;
int outputPcmFrameSize =
isInputPcm ? Util.getPcmFrameSize(encoding, channelCount) : C.LENGTH_UNSET;
boolean canApplyPlaybackParameters = processingEnabled && !shouldConvertHighResIntPcmToFloat;
boolean canApplyPlaybackParameters = processingEnabled && !useFloatOutput;
Configuration pendingConfiguration =
new Configuration(
isInputPcm,

View File

@ -16,13 +16,19 @@
package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer;
/**
* An {@link AudioProcessor} that converts 24-bit and 32-bit integer PCM audio to 32-bit float PCM
* audio.
* An {@link AudioProcessor} that converts high resolution PCM audio to 32-bit float. The following
* encodings are supported as input:
*
* <ul>
* <li>{@link C#ENCODING_PCM_24BIT}
* <li>{@link C#ENCODING_PCM_32BIT}
* <li>{@link C#ENCODING_PCM_FLOAT} ({@link #isActive()} will return {@code false})
* </ul>
*/
/* package */ final class FloatResamplingAudioProcessor extends BaseAudioProcessor {
@ -32,10 +38,11 @@ import java.nio.ByteBuffer;
@Override
public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
if (!Util.isEncodingHighResolutionIntegerPcm(inputAudioFormat.encoding)) {
@C.PcmEncoding int encoding = inputAudioFormat.encoding;
if (!Util.isEncodingHighResolutionPcm(encoding)) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
return Util.isEncodingHighResolutionIntegerPcm(inputAudioFormat.encoding)
return encoding != C.ENCODING_PCM_FLOAT
? new AudioFormat(
inputAudioFormat.sampleRate, inputAudioFormat.channelCount, C.ENCODING_PCM_FLOAT)
: AudioFormat.NOT_SET;
@ -43,15 +50,24 @@ import java.nio.ByteBuffer;
@Override
public void queueInput(ByteBuffer inputBuffer) {
Assertions.checkState(Util.isEncodingHighResolutionIntegerPcm(inputAudioFormat.encoding));
boolean isInput32Bit = inputAudioFormat.encoding == C.ENCODING_PCM_32BIT;
int position = inputBuffer.position();
int limit = inputBuffer.limit();
int size = limit - position;
int resampledSize = isInput32Bit ? size : (size / 3) * 4;
ByteBuffer buffer = replaceOutputBuffer(resampledSize);
if (isInput32Bit) {
ByteBuffer buffer;
switch (inputAudioFormat.encoding) {
case C.ENCODING_PCM_24BIT:
buffer = replaceOutputBuffer((size / 3) * 4);
for (int i = position; i < limit; i += 3) {
int pcm32BitInteger =
((inputBuffer.get(i) & 0xFF) << 8)
| ((inputBuffer.get(i + 1) & 0xFF) << 16)
| ((inputBuffer.get(i + 2) & 0xFF) << 24);
writePcm32BitFloat(pcm32BitInteger, buffer);
}
break;
case C.ENCODING_PCM_32BIT:
buffer = replaceOutputBuffer(size);
for (int i = position; i < limit; i += 4) {
int pcm32BitInteger =
(inputBuffer.get(i) & 0xFF)
@ -60,14 +76,16 @@ import java.nio.ByteBuffer;
| ((inputBuffer.get(i + 3) & 0xFF) << 24);
writePcm32BitFloat(pcm32BitInteger, buffer);
}
} else { // Input is 24-bit PCM.
for (int i = position; i < limit; i += 3) {
int pcm32BitInteger =
((inputBuffer.get(i) & 0xFF) << 8)
| ((inputBuffer.get(i + 1) & 0xFF) << 16)
| ((inputBuffer.get(i + 2) & 0xFF) << 24);
writePcm32BitFloat(pcm32BitInteger, buffer);
}
break;
case C.ENCODING_PCM_8BIT:
case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
case C.ENCODING_PCM_FLOAT:
case C.ENCODING_INVALID:
case Format.NO_VALUE:
default:
// Never happens.
throw new IllegalStateException();
}
inputBuffer.position(inputBuffer.limit());

View File

@ -20,8 +20,17 @@ import com.google.android.exoplayer2.Format;
import java.nio.ByteBuffer;
/**
* An {@link AudioProcessor} that converts 8-bit, 24-bit and 32-bit integer PCM audio to 16-bit
* integer PCM audio.
* An {@link AudioProcessor} that converts different PCM audio encodings to 16-bit integer PCM. The
* following encodings are supported as input:
*
* <ul>
* <li>{@link C#ENCODING_PCM_8BIT}
* <li>{@link C#ENCODING_PCM_16BIT} ({@link #isActive()} will return {@code false})
* <li>{@link C#ENCODING_PCM_16BIT_BIG_ENDIAN}
* <li>{@link C#ENCODING_PCM_24BIT}
* <li>{@link C#ENCODING_PCM_32BIT}
* <li>{@link C#ENCODING_PCM_FLOAT}
* </ul>
*/
/* package */ final class ResamplingAudioProcessor extends BaseAudioProcessor {
@ -33,7 +42,8 @@ import java.nio.ByteBuffer;
&& encoding != C.ENCODING_PCM_16BIT
&& encoding != C.ENCODING_PCM_16BIT_BIG_ENDIAN
&& encoding != C.ENCODING_PCM_24BIT
&& encoding != C.ENCODING_PCM_32BIT) {
&& encoding != C.ENCODING_PCM_32BIT
&& encoding != C.ENCODING_PCM_FLOAT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
return encoding != C.ENCODING_PCM_16BIT
@ -60,10 +70,10 @@ import java.nio.ByteBuffer;
resampledSize = (size / 3) * 2;
break;
case C.ENCODING_PCM_32BIT:
case C.ENCODING_PCM_FLOAT:
resampledSize = size / 2;
break;
case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_FLOAT:
case C.ENCODING_INVALID:
case Format.NO_VALUE:
default:
@ -101,8 +111,16 @@ import java.nio.ByteBuffer;
buffer.put(inputBuffer.get(i + 3));
}
break;
case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_FLOAT:
// 32 bit floating point -> 16 bit resampling. Floating point values are in the range
// [-1.0, 1.0], so need to be scaled by Short.MAX_VALUE.
for (int i = position; i < limit; i += 4) {
short value = (short) (inputBuffer.getFloat(i) * Short.MAX_VALUE);
buffer.put((byte) (value & 0xFF));
buffer.put((byte) ((value >> 8) & 0xFF));
}
break;
case C.ENCODING_PCM_16BIT:
case C.ENCODING_INVALID:
case Format.NO_VALUE:
default:

View File

@ -155,15 +155,16 @@ import java.nio.ByteBuffer;
@Override
protected void onFlush() {
if (reconfigurationPending) {
// This is the initial flush after reconfiguration. Prepare to trim bytes from the start/end.
reconfigurationPending = false;
endBuffer = new byte[trimEndFrames * inputAudioFormat.bytesPerFrame];
pendingTrimStartBytes = trimStartFrames * inputAudioFormat.bytesPerFrame;
} else {
// Audio processors are flushed after initial configuration, so we leave the pending trim
// start byte count unmodified if the processor was just configured. Otherwise we (possibly
// incorrectly) assume that this is a seek to a non-zero position. We should instead check the
// timestamp of the first input buffer queued after flushing to decide whether to trim (see
// also [Internal: b/77292509]).
// This is a flush during playback (after the initial flush). We assume this was caused by a
// seek to a non-zero position and clear pending start bytes. This assumption may be wrong (we
// may be seeking to zero), but playing data that should have been trimmed shouldn't be
// noticeable after a seek. Ideally we would check the timestamp of the first input buffer
// queued after flushing to decide whether to trim (see also [Internal: b/77292509]).
pendingTrimStartBytes = 0;
}
endBufferSize = 0;

View File

@ -61,6 +61,7 @@ public final class WavUtil {
return TYPE_PCM;
case C.ENCODING_PCM_FLOAT:
return TYPE_FLOAT;
case C.ENCODING_PCM_16BIT_BIG_ENDIAN: // Not TYPE_PCM, because TYPE_PCM is little endian.
case C.ENCODING_INVALID:
case Format.NO_VALUE:
default:

View File

@ -63,6 +63,14 @@ public class DecoderInputBuffer extends Buffer {
/** The buffer's data, or {@code null} if no data has been set. */
@Nullable public ByteBuffer data;
// TODO: Remove this temporary signaling once end-of-stream propagation for clips using content
// protection is fixed. See [Internal: b/153326944] for details.
/**
* Whether the last attempt to read a sample into this buffer failed due to not yet having the DRM
* keys associated with the next sample.
*/
public boolean waitingForKeys;
/**
* The time at which the sample should be presented.
*/
@ -137,6 +145,7 @@ public class DecoderInputBuffer extends Buffer {
}
// Instantiate a new buffer if possible.
ByteBuffer newData = createReplacementByteBuffer(requiredCapacity);
newData.order(data.order());
// Copy data up to the current position from the old buffer to the new one.
if (position > 0) {
data.flip();
@ -182,6 +191,7 @@ public class DecoderInputBuffer extends Buffer {
if (supplementalData != null) {
supplementalData.clear();
}
waitingForKeys = false;
}
private ByteBuffer createReplacementByteBuffer(int requiredCapacity) {

View File

@ -149,6 +149,7 @@ public abstract class SimpleDecoder<
while (!queuedOutputBuffers.isEmpty()) {
queuedOutputBuffers.removeFirst().release();
}
exception = null;
}
}
@ -225,6 +226,7 @@ public abstract class SimpleDecoder<
if (inputBuffer.isDecodeOnly()) {
outputBuffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY);
}
@Nullable E exception;
try {
exception = decode(inputBuffer, outputBuffer, resetDecoder);
} catch (RuntimeException e) {
@ -238,8 +240,9 @@ public abstract class SimpleDecoder<
exception = createUnexpectedDecodeException(e);
}
if (exception != null) {
// Memory barrier to ensure that the decoder exception is visible from the playback thread.
synchronized (lock) {}
synchronized (lock) {
this.exception = exception;
}
return false;
}
}

View File

@ -341,14 +341,20 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto
C.PLAYREADY_UUID, addLaUrlAttributeIfMissing(schemeSpecificData));
}
// Prior to L the Widevine CDM required data to be extracted from the PSSH atom. Some Amazon
// devices also required data to be extracted from the PSSH atom for PlayReady.
if ((Util.SDK_INT < 21 && C.WIDEVINE_UUID.equals(uuid))
// Prior to API level 21, the Widevine CDM required scheme specific data to be extracted from
// the PSSH atom. We also extract the data on API levels 21 and 22 because these API levels
// don't handle V1 PSSH atoms, but do handle scheme specific data regardless of whether it's
// extracted from a V0 or a V1 PSSH atom. Hence extracting the data allows us to support content
// that only provides V1 PSSH atoms. API levels 23 and above understand V0 and V1 PSSH atoms,
// and so we do not extract the data.
// Some Amazon devices also require data to be extracted from the PSSH atom for PlayReady.
if ((Util.SDK_INT < 23 && C.WIDEVINE_UUID.equals(uuid))
|| (C.PLAYREADY_UUID.equals(uuid)
&& "Amazon".equals(Util.MANUFACTURER)
&& ("AFTB".equals(Util.MODEL) // Fire TV Gen 1
|| "AFTS".equals(Util.MODEL) // Fire TV Gen 2
|| "AFTM".equals(Util.MODEL)))) { // Fire TV Stick Gen 1
|| "AFTM".equals(Util.MODEL) // Fire TV Stick Gen 1
|| "AFTT".equals(Util.MODEL)))) { // Fire TV Stick Gen 2
byte[] psshData = PsshAtomUtil.parseSchemeSpecificData(initData, uuid);
if (psshData != null) {
// Extraction succeeded, so return the extracted data.

View File

@ -433,10 +433,15 @@ import java.util.List;
long editDuration =
Util.scaleLargeTimestamp(
track.editListDurations[i], track.timescale, track.movieTimescale);
startIndices[i] = Util.binarySearchCeil(timestamps, editMediaTime, true, true);
startIndices[i] =
Util.binarySearchFloor(
timestamps, editMediaTime, /* inclusive= */ true, /* stayInBounds= */ true);
endIndices[i] =
Util.binarySearchCeil(
timestamps, editMediaTime + editDuration, omitClippedSample, false);
timestamps,
editMediaTime + editDuration,
/* inclusive= */ omitClippedSample,
/* stayInBounds= */ false);
while (startIndices[i] < endIndices[i]
&& (flags[startIndices[i]] & C.BUFFER_FLAG_KEY_FRAME) == 0) {
// Applying the edit correctly would require prerolling from the previous sync sample. In
@ -474,7 +479,7 @@ import java.util.List;
long ptsUs = Util.scaleLargeTimestamp(pts, C.MICROS_PER_SECOND, track.movieTimescale);
long timeInSegmentUs =
Util.scaleLargeTimestamp(
timestamps[j] - editMediaTime, C.MICROS_PER_SECOND, track.timescale);
Math.max(0, timestamps[j] - editMediaTime), C.MICROS_PER_SECOND, track.timescale);
editedTimestamps[sampleIndex] = ptsUs + timeInSegmentUs;
if (copyMetadata && editedSizes[sampleIndex] > editedMaximumSize) {
editedMaximumSize = sizes[j];

View File

@ -345,42 +345,44 @@ public final class AdtsReader implements ElementaryStreamReader {
}
/**
* Returns whether the given syncPositionCandidate is a real SYNC word.
*
* <p>SYNC word pattern can occur within AAC data, so we perform a few checks to make sure this is
* really a SYNC word. This includes:
* Checks whether a candidate SYNC word position is likely to be the position of a real SYNC word.
* The caller must check that the first byte of the SYNC word is 0xFF before calling this method.
* This method performs the following checks:
*
* <ul>
* <li>Checking if MPEG version of this frame matches the first detected version.
* <li>Checking if the sample rate index of this frame matches the first detected sample rate
* index.
* <li>Checking if the bytes immediately after the current package also match a SYNC-word.
* <li>The MPEG version of this frame must match the previously detected version.
* <li>The sample rate index of this frame must match the previously detected sample rate index.
* <li>The frame size must be at least 7 bytes
* <li>The bytes following the frame must be either another SYNC word with the same MPEG
* version, or the start of an ID3 header.
* </ul>
*
* If the buffer runs out of data for any check, optimistically skip that check, because
* AdtsReader consumes each buffer as a whole. We will still run a header validity check later.
* With the exception of the first check, if there is insufficient data in the buffer then checks
* are optimistically skipped and {@code true} is returned.
*
* @param pesBuffer The buffer containing at data to check.
* @param syncPositionCandidate The candidate SYNC word position. May be -1 if the first byte of
* the candidate was the last byte of the previously consumed buffer.
* @return True if all checks were passed or skipped, indicating the position is likely to be the
* position of a real SYNC word. False otherwise.
*/
private boolean checkSyncPositionValid(ParsableByteArray pesBuffer, int syncPositionCandidate) {
// The SYNC word contains 2 bytes, and the first byte may be in the previously consumed buffer.
// Hence the second byte of the SYNC word may be byte 0 of this buffer, and
// syncPositionCandidate (which indicates position of the first byte of the SYNC word) may be
// -1.
// Since the first byte of the SYNC word is always FF, which does not contain any informational
// bits, we set the byte position to be the second byte in the SYNC word to ensure it's always
// within this buffer.
pesBuffer.setPosition(syncPositionCandidate + 1);
if (!tryRead(pesBuffer, adtsScratch.data, 1)) {
return false;
}
// The MPEG version of this frame must match the previously detected version.
adtsScratch.setPosition(4);
int currentFrameVersion = adtsScratch.readBits(1);
if (firstFrameVersion != VERSION_UNSET && currentFrameVersion != firstFrameVersion) {
return false;
}
// The sample rate index of this frame must match the previously detected sample rate index.
if (firstFrameSampleRateIndex != C.INDEX_UNSET) {
if (!tryRead(pesBuffer, adtsScratch.data, 1)) {
// Insufficient data for further checks.
return true;
}
adtsScratch.setPosition(2);
@ -391,24 +393,50 @@ public final class AdtsReader implements ElementaryStreamReader {
pesBuffer.setPosition(syncPositionCandidate + 2);
}
// Optionally check the byte after this frame matches SYNC word.
// The frame size must be at least 7 bytes.
if (!tryRead(pesBuffer, adtsScratch.data, 4)) {
// Insufficient data for further checks.
return true;
}
adtsScratch.setPosition(14);
int frameSize = adtsScratch.readBits(13);
if (frameSize <= 6) {
// Not a frame.
if (frameSize < 7) {
return false;
}
// The bytes following the frame must be either another SYNC word with the same MPEG version, or
// the start of an ID3 header.
byte[] data = pesBuffer.data;
int dataLimit = pesBuffer.limit();
int nextSyncPosition = syncPositionCandidate + frameSize;
if (nextSyncPosition + 1 >= pesBuffer.limit()) {
if (nextSyncPosition >= dataLimit) {
// Insufficient data for further checks.
return true;
}
return (isAdtsSyncBytes(pesBuffer.data[nextSyncPosition], pesBuffer.data[nextSyncPosition + 1])
&& (firstFrameVersion == VERSION_UNSET
|| ((pesBuffer.data[nextSyncPosition + 1] & 0x8) >> 3) == currentFrameVersion));
if (data[nextSyncPosition] == (byte) 0xFF) {
if (nextSyncPosition + 1 == dataLimit) {
// Insufficient data for further checks.
return true;
}
return isAdtsSyncBytes((byte) 0xFF, data[nextSyncPosition + 1])
&& ((data[nextSyncPosition + 1] & 0x8) >> 3) == currentFrameVersion;
} else {
if (data[nextSyncPosition] != 'I') {
return false;
}
if (nextSyncPosition + 1 == dataLimit) {
// Insufficient data for further checks.
return true;
}
if (data[nextSyncPosition + 1] != 'D') {
return false;
}
if (nextSyncPosition + 2 == dataLimit) {
// Insufficient data for further checks.
return true;
}
return data[nextSyncPosition + 2] == '3';
}
}
private boolean isAdtsSyncBytes(byte firstByte, byte secondByte) {

View File

@ -259,14 +259,14 @@ public final class WavExtractor implements Extractor {
public boolean sampleData(ExtractorInput input, long bytesLeft)
throws IOException, InterruptedException {
// Write sample data until we've reached the target sample size, or the end of the data.
boolean endOfSampleData = bytesLeft == 0;
while (!endOfSampleData && pendingOutputBytes < targetSampleSizeBytes) {
while (bytesLeft > 0 && pendingOutputBytes < targetSampleSizeBytes) {
int bytesToRead = (int) Math.min(targetSampleSizeBytes - pendingOutputBytes, bytesLeft);
int bytesAppended = trackOutput.sampleData(input, bytesToRead, true);
if (bytesAppended == RESULT_END_OF_INPUT) {
endOfSampleData = true;
bytesLeft = 0;
} else {
pendingOutputBytes += bytesAppended;
bytesLeft -= bytesAppended;
}
}
@ -288,7 +288,7 @@ public final class WavExtractor implements Extractor {
pendingOutputBytes = offset;
}
return endOfSampleData;
return bytesLeft <= 0;
}
}

View File

@ -344,6 +344,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
private boolean codecNeedsReconfigureWorkaround;
private boolean codecNeedsDiscardToSpsWorkaround;
private boolean codecNeedsFlushWorkaround;
private boolean codecNeedsSosFlushWorkaround;
private boolean codecNeedsEosFlushWorkaround;
private boolean codecNeedsEosOutputExceptionWorkaround;
private boolean codecNeedsMonoChannelCountWorkaround;
@ -364,6 +365,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
@DrainAction private int codecDrainAction;
private boolean codecReceivedBuffers;
private boolean codecReceivedEos;
private boolean codecHasOutputMediaFormat;
private long largestQueuedPresentationTimeUs;
private long lastBufferInStreamPresentationTimeUs;
private boolean inputStreamEnded;
@ -652,6 +654,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
availableCodecInfos = null;
codecInfo = null;
codecFormat = null;
codecHasOutputMediaFormat = false;
resetInputBuffer();
resetOutputBuffer();
resetCodecBuffers();
@ -765,6 +768,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
}
if (codecDrainAction == DRAIN_ACTION_REINITIALIZE
|| codecNeedsFlushWorkaround
|| (codecNeedsSosFlushWorkaround && !codecHasOutputMediaFormat)
|| (codecNeedsEosFlushWorkaround && codecReceivedEos)) {
releaseCodec();
return true;
@ -944,6 +948,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
codecNeedsReconfigureWorkaround = codecNeedsReconfigureWorkaround(codecName);
codecNeedsDiscardToSpsWorkaround = codecNeedsDiscardToSpsWorkaround(codecName, codecFormat);
codecNeedsFlushWorkaround = codecNeedsFlushWorkaround(codecName);
codecNeedsSosFlushWorkaround = codecNeedsSosFlushWorkaround(codecName);
codecNeedsEosFlushWorkaround = codecNeedsEosFlushWorkaround(codecName);
codecNeedsEosOutputExceptionWorkaround = codecNeedsEosOutputExceptionWorkaround(codecName);
codecNeedsMonoChannelCountWorkaround =
@ -1610,6 +1615,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
/** Processes a new output {@link MediaFormat}. */
private void processOutputFormat() throws ExoPlaybackException {
codecHasOutputMediaFormat = true;
MediaFormat mediaFormat = codec.getOutputFormat();
if (codecAdaptationWorkaroundMode != ADAPTATION_WORKAROUND_MODE_NEVER
&& mediaFormat.getInteger(MediaFormat.KEY_WIDTH) == ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT
@ -1989,4 +1995,20 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
&& "OMX.MTK.AUDIO.DECODER.MP3".equals(name);
}
/**
* Returns whether the decoder is known to behave incorrectly if flushed prior to having output a
* {@link MediaFormat}.
*
* <p>If true is returned, the renderer will work around the issue by instantiating a new decoder
* when this case occurs.
*
* <p>See [Internal: b/141097367].
*
* @param name The name of the decoder.
* @return True if the decoder is known to behave incorrectly if flushed prior to having output a
* {@link MediaFormat}. False otherwise.
*/
private static boolean codecNeedsSosFlushWorkaround(String name) {
return Util.SDK_INT == 29 && "c2.android.aac.decoder".equals(name);
}
}

View File

@ -25,6 +25,7 @@ import android.util.Pair;
import android.util.SparseIntArray;
import androidx.annotation.CheckResult;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Log;
@ -289,9 +290,16 @@ public final class MediaCodecUtil {
// Note: MediaCodecList is sorted by the framework such that the best decoders come first.
for (int i = 0; i < numberOfCodecs; i++) {
android.media.MediaCodecInfo codecInfo = mediaCodecList.getCodecInfoAt(i);
if (isAlias(codecInfo)) {
// Skip aliases of other codecs, since they will also be listed under their canonical
// names.
continue;
}
String name = codecInfo.getName();
@Nullable
String codecMimeType = getCodecMimeType(codecInfo, name, secureDecodersExplicit, mimeType);
if (!isCodecUsableDecoder(codecInfo, name, secureDecodersExplicit, mimeType)) {
continue;
}
@Nullable String codecMimeType = getCodecMimeType(codecInfo, name, mimeType);
if (codecMimeType == null) {
continue;
}
@ -373,7 +381,6 @@ public final class MediaCodecUtil {
*
* @param info The codec information.
* @param name The name of the codec
* @param secureDecodersExplicit Whether secure decoders were explicitly listed, if present.
* @param mimeType The MIME type.
* @return The codec's supported MIME type for media of type {@code mimeType}, or {@code null} if
* the codec can't be used. If non-null, the returned type will be equal to {@code mimeType}
@ -383,12 +390,7 @@ public final class MediaCodecUtil {
private static String getCodecMimeType(
android.media.MediaCodecInfo info,
String name,
boolean secureDecodersExplicit,
String mimeType) {
if (!isCodecUsableDecoder(info, name, secureDecodersExplicit, mimeType)) {
return null;
}
String[] supportedTypes = info.getSupportedTypes();
for (String supportedType : supportedTypes) {
if (supportedType.equalsIgnoreCase(mimeType)) {
@ -591,6 +593,15 @@ public final class MediaCodecUtil {
}
}
private static boolean isAlias(android.media.MediaCodecInfo info) {
return Util.SDK_INT >= 29 && isAliasV29(info);
}
@RequiresApi(29)
private static boolean isAliasV29(android.media.MediaCodecInfo info) {
return info.isAlias();
}
/**
* The result of {@link android.media.MediaCodecInfo#isHardwareAccelerated()} for API levels 29+,
* or a best-effort approximation for lower levels.

View File

@ -245,8 +245,8 @@ public final class DownloadHelper {
* @param dataSourceFactory A {@link DataSource.Factory} used to load the manifest.
* @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are
* selected.
* @param drmSessionManager An optional {@link DrmSessionManager} used by the renderers created by
* {@code renderersFactory}.
* @param drmSessionManager An optional {@link DrmSessionManager}. Used to help determine which
* tracks can be selected.
* @param trackSelectorParameters {@link DefaultTrackSelector.Parameters} for selecting tracks for
* downloading.
* @return A {@link DownloadHelper} for DASH streams.
@ -315,8 +315,8 @@ public final class DownloadHelper {
* @param dataSourceFactory A {@link DataSource.Factory} used to load the playlist.
* @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are
* selected.
* @param drmSessionManager An optional {@link DrmSessionManager} used by the renderers created by
* {@code renderersFactory}.
* @param drmSessionManager An optional {@link DrmSessionManager}. Used to help determine which
* tracks can be selected.
* @param trackSelectorParameters {@link DefaultTrackSelector.Parameters} for selecting tracks for
* downloading.
* @return A {@link DownloadHelper} for HLS streams.
@ -385,8 +385,8 @@ public final class DownloadHelper {
* @param dataSourceFactory A {@link DataSource.Factory} used to load the manifest.
* @param renderersFactory A {@link RenderersFactory} creating the renderers for which tracks are
* selected.
* @param drmSessionManager An optional {@link DrmSessionManager} used by the renderers created by
* {@code renderersFactory}.
* @param drmSessionManager An optional {@link DrmSessionManager}. Used to help determine which
* tracks can be selected.
* @param trackSelectorParameters {@link DefaultTrackSelector.Parameters} for selecting tracks for
* downloading.
* @return A {@link DownloadHelper} for SmoothStreaming streams.
@ -414,27 +414,27 @@ public final class DownloadHelper {
/**
* Equivalent to {@link #createMediaSource(DownloadRequest, Factory, DrmSessionManager)
* createMediaSource(downloadRequest, dataSourceFactory,
* DrmSessionManager.getDummyDrmSessionManager())}.
* createMediaSource(downloadRequest, dataSourceFactory, null)}.
*/
public static MediaSource createMediaSource(
DownloadRequest downloadRequest, DataSource.Factory dataSourceFactory) {
return createMediaSource(
downloadRequest, dataSourceFactory, DrmSessionManager.getDummyDrmSessionManager());
return createMediaSource(downloadRequest, dataSourceFactory, /* drmSessionManager= */ null);
}
/**
* Utility method to create a MediaSource which only contains the tracks defined in {@code
* Utility method to create a {@link MediaSource} that only exposes the tracks defined in {@code
* downloadRequest}.
*
* @param downloadRequest A {@link DownloadRequest}.
* @param dataSourceFactory A factory for {@link DataSource}s to read the media.
* @return A MediaSource which only contains the tracks defined in {@code downloadRequest}.
* @param drmSessionManager An optional {@link DrmSessionManager} to be passed to the {@link
* MediaSource}.
* @return A {@link MediaSource} that only exposes the tracks defined in {@code downloadRequest}.
*/
public static MediaSource createMediaSource(
DownloadRequest downloadRequest,
DataSource.Factory dataSourceFactory,
DrmSessionManager<?> drmSessionManager) {
@Nullable DrmSessionManager<?> drmSessionManager) {
@Nullable Constructor<? extends MediaSourceFactory> constructor;
switch (downloadRequest.type) {
case DownloadRequest.TYPE_DASH:

View File

@ -595,7 +595,7 @@ public abstract class DownloadService extends Service {
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
public int onStartCommand(@Nullable Intent intent, int flags, int startId) {
lastStartId = startId;
taskRemoved = false;
@Nullable String intentAction = null;
@ -617,7 +617,9 @@ public abstract class DownloadService extends Service {
// Do nothing.
break;
case ACTION_ADD_DOWNLOAD:
@Nullable DownloadRequest downloadRequest = intent.getParcelableExtra(KEY_DOWNLOAD_REQUEST);
@Nullable
DownloadRequest downloadRequest =
Assertions.checkNotNull(intent).getParcelableExtra(KEY_DOWNLOAD_REQUEST);
if (downloadRequest == null) {
Log.e(TAG, "Ignored ADD_DOWNLOAD: Missing " + KEY_DOWNLOAD_REQUEST + " extra");
} else {
@ -642,7 +644,7 @@ public abstract class DownloadService extends Service {
downloadManager.pauseDownloads();
break;
case ACTION_SET_STOP_REASON:
if (!intent.hasExtra(KEY_STOP_REASON)) {
if (!Assertions.checkNotNull(intent).hasExtra(KEY_STOP_REASON)) {
Log.e(TAG, "Ignored SET_STOP_REASON: Missing " + KEY_STOP_REASON + " extra");
} else {
int stopReason = intent.getIntExtra(KEY_STOP_REASON, /* defaultValue= */ 0);
@ -650,7 +652,9 @@ public abstract class DownloadService extends Service {
}
break;
case ACTION_SET_REQUIREMENTS:
@Nullable Requirements requirements = intent.getParcelableExtra(KEY_REQUIREMENTS);
@Nullable
Requirements requirements =
Assertions.checkNotNull(intent).getParcelableExtra(KEY_REQUIREMENTS);
if (requirements == null) {
Log.e(TAG, "Ignored SET_REQUIREMENTS: Missing " + KEY_REQUIREMENTS + " extra");
} else {

View File

@ -324,7 +324,8 @@ public final class ClippingMediaPeriod implements MediaPeriod, MediaPeriod.Callb
if (endUs != C.TIME_END_OF_SOURCE
&& ((result == C.RESULT_BUFFER_READ && buffer.timeUs >= endUs)
|| (result == C.RESULT_NOTHING_READ
&& getBufferedPositionUs() == C.TIME_END_OF_SOURCE))) {
&& getBufferedPositionUs() == C.TIME_END_OF_SOURCE
&& !buffer.waitingForKeys))) {
buffer.clear();
buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
sentEos = true;

View File

@ -539,7 +539,7 @@ public class SampleQueue implements TrackOutput {
boolean loadingFinished,
long decodeOnlyUntilUs,
SampleExtrasHolder extrasHolder) {
buffer.waitingForKeys = false;
// This is a temporary fix for https://github.com/google/ExoPlayer/issues/6155.
// TODO: Remove it and replace it with a fix that discards samples when writing to the queue.
boolean hasNextSample;
@ -573,6 +573,7 @@ public class SampleQueue implements TrackOutput {
}
if (!mayReadSample(relativeReadIndex)) {
buffer.waitingForKeys = true;
return C.RESULT_NOTHING_READ;
}

View File

@ -23,11 +23,11 @@ import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.BaseRenderer;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented;
@ -45,6 +45,8 @@ import java.util.List;
*/
public final class TextRenderer extends BaseRenderer implements Callback {
private static final String TAG = "TextRenderer";
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({
@ -143,19 +145,13 @@ public final class TextRenderer extends BaseRenderer implements Callback {
@Override
protected void onPositionReset(long positionUs, boolean joining) {
clearOutput();
inputStreamEnded = false;
outputStreamEnded = false;
if (decoderReplacementState != REPLACEMENT_STATE_NONE) {
replaceDecoder();
} else {
releaseBuffers();
decoder.flush();
}
resetOutputAndDecoder();
}
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
public void render(long positionUs, long elapsedRealtimeUs) {
if (outputStreamEnded) {
return;
}
@ -165,7 +161,8 @@ public final class TextRenderer extends BaseRenderer implements Callback {
try {
nextSubtitle = decoder.dequeueOutputBuffer();
} catch (SubtitleDecoderException e) {
throw createRendererException(e, streamFormat);
handleDecoderError(e);
return;
}
}
@ -247,7 +244,8 @@ public final class TextRenderer extends BaseRenderer implements Callback {
}
}
} catch (SubtitleDecoderException e) {
throw createRendererException(e, streamFormat);
handleDecoderError(e);
return;
}
}
@ -329,4 +327,24 @@ public final class TextRenderer extends BaseRenderer implements Callback {
output.onCues(cues);
}
/**
* Called when {@link #decoder} throws an exception, so it can be logged and playback can
* continue.
*
* <p>Logs {@code e} and resets state to allow decoding the next sample.
*/
private void handleDecoderError(SubtitleDecoderException e) {
Log.e(TAG, "Subtitle decoding failed. streamFormat=" + streamFormat, e);
resetOutputAndDecoder();
}
private void resetOutputAndDecoder() {
clearOutput();
if (decoderReplacementState != REPLACEMENT_STATE_NONE) {
replaceDecoder();
} else {
releaseBuffers();
decoder.flush();
}
}
}

View File

@ -41,10 +41,12 @@ public final class SubripDecoder extends SimpleSubtitleDecoder {
private static final String TAG = "SubripDecoder";
private static final String SUBRIP_TIMECODE = "(?:(\\d+):)?(\\d+):(\\d+),(\\d+)";
// Some SRT files don't include hours or milliseconds in the timecode, so we use optional groups.
private static final String SUBRIP_TIMECODE = "(?:(\\d+):)?(\\d+):(\\d+)(?:,(\\d+))?";
private static final Pattern SUBRIP_TIMING_LINE =
Pattern.compile("\\s*(" + SUBRIP_TIMECODE + ")\\s*-->\\s*(" + SUBRIP_TIMECODE + ")\\s*");
// NOTE: Android Studio's suggestion to simplify '\\}' is incorrect [internal: b/144480183].
private static final Pattern SUBRIP_TAG_PATTERN = Pattern.compile("\\{\\\\.*?\\}");
private static final String SUBRIP_ALIGNMENT_TAG = "\\{\\\\an[1-9]\\}";
@ -229,10 +231,14 @@ public final class SubripDecoder extends SimpleSubtitleDecoder {
}
private static long parseTimecode(Matcher matcher, int groupOffset) {
long timestampMs = Long.parseLong(matcher.group(groupOffset + 1)) * 60 * 60 * 1000;
@Nullable String hours = matcher.group(groupOffset + 1);
long timestampMs = hours != null ? Long.parseLong(hours) * 60 * 60 * 1000 : 0;
timestampMs += Long.parseLong(matcher.group(groupOffset + 2)) * 60 * 1000;
timestampMs += Long.parseLong(matcher.group(groupOffset + 3)) * 1000;
timestampMs += Long.parseLong(matcher.group(groupOffset + 4));
@Nullable String millis = matcher.group(groupOffset + 4);
if (millis != null) {
timestampMs += Long.parseLong(millis);
}
return timestampMs * 1000;
}

View File

@ -279,8 +279,8 @@ public class DefaultHttpDataSource extends BaseDataSource implements HttpDataSou
try {
connection = makeConnection(dataSpec);
} catch (IOException e) {
throw new HttpDataSourceException("Unable to connect to " + dataSpec.uri.toString(), e,
dataSpec, HttpDataSourceException.TYPE_OPEN);
throw new HttpDataSourceException(
"Unable to connect", e, dataSpec, HttpDataSourceException.TYPE_OPEN);
}
String responseMessage;
@ -289,8 +289,8 @@ public class DefaultHttpDataSource extends BaseDataSource implements HttpDataSou
responseMessage = connection.getResponseMessage();
} catch (IOException e) {
closeConnectionQuietly();
throw new HttpDataSourceException("Unable to connect to " + dataSpec.uri.toString(), e,
dataSpec, HttpDataSourceException.TYPE_OPEN);
throw new HttpDataSourceException(
"Unable to connect", e, dataSpec, HttpDataSourceException.TYPE_OPEN);
}
// Check for a valid response code.

View File

@ -16,6 +16,7 @@
package com.google.android.exoplayer2.util;
import android.os.SystemClock;
import android.text.TextUtils;
import android.view.Surface;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
@ -198,7 +199,7 @@ public class EventLogger implements AnalyticsListener {
logd(eventTime, "tracks", "[]");
return;
}
logd("tracks [" + getEventTimeString(eventTime) + ", ");
logd("tracks [" + getEventTimeString(eventTime));
// Log tracks associated to renderers.
int rendererCount = mappedTrackInfo.getRendererCount();
for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) {
@ -282,7 +283,7 @@ public class EventLogger implements AnalyticsListener {
@Override
public void onMetadata(EventTime eventTime, Metadata metadata) {
logd("metadata [" + getEventTimeString(eventTime) + ", ");
logd("metadata [" + getEventTimeString(eventTime));
printMetadata(metadata, " ");
logd("]");
}
@ -469,27 +470,26 @@ public class EventLogger implements AnalyticsListener {
}
/**
* Logs an error message and exception.
* Logs an error message.
*
* @param msg The message to log.
* @param tr The exception to log.
*/
protected void loge(String msg, @Nullable Throwable tr) {
Log.e(tag, msg, tr);
protected void loge(String msg) {
Log.e(tag, msg);
}
// Internal methods
private void logd(EventTime eventTime, String eventName) {
logd(getEventString(eventTime, eventName));
logd(getEventString(eventTime, eventName, /* eventDescription= */ null, /* throwable= */ null));
}
private void logd(EventTime eventTime, String eventName, String eventDescription) {
logd(getEventString(eventTime, eventName, eventDescription));
logd(getEventString(eventTime, eventName, eventDescription, /* throwable= */ null));
}
private void loge(EventTime eventTime, String eventName, @Nullable Throwable throwable) {
loge(getEventString(eventTime, eventName), throwable);
loge(getEventString(eventTime, eventName, /* eventDescription= */ null, throwable));
}
private void loge(
@ -497,7 +497,7 @@ public class EventLogger implements AnalyticsListener {
String eventName,
String eventDescription,
@Nullable Throwable throwable) {
loge(getEventString(eventTime, eventName, eventDescription), throwable);
loge(getEventString(eventTime, eventName, eventDescription, throwable));
}
private void printInternalError(EventTime eventTime, String type, Exception e) {
@ -510,12 +510,21 @@ public class EventLogger implements AnalyticsListener {
}
}
private String getEventString(EventTime eventTime, String eventName) {
return eventName + " [" + getEventTimeString(eventTime) + "]";
private String getEventString(
EventTime eventTime,
String eventName,
@Nullable String eventDescription,
@Nullable Throwable throwable) {
String eventString = eventName + " [" + getEventTimeString(eventTime);
if (eventDescription != null) {
eventString += ", " + eventDescription;
}
private String getEventString(EventTime eventTime, String eventName, String eventDescription) {
return eventName + " [" + getEventTimeString(eventTime) + ", " + eventDescription + "]";
@Nullable String throwableString = Log.getThrowableString(throwable);
if (!TextUtils.isEmpty(throwableString)) {
eventString += "\n " + throwableString.replace("\n", "\n ") + '\n';
}
eventString += "]";
return eventString;
}
private String getEventTimeString(EventTime eventTime) {

View File

@ -21,6 +21,7 @@ import androidx.annotation.Nullable;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.net.UnknownHostException;
/** Wrapper around {@link android.util.Log} which allows to set the log level. */
public final class Log {
@ -69,7 +70,8 @@ public final class Log {
}
/**
* Sets whether stack traces of {@link Throwable}s will be logged to logcat.
* Sets whether stack traces of {@link Throwable}s will be logged to logcat. Stack trace logging
* is enabled by default.
*
* @param logStackTraces Whether stack traces will be logged.
*/
@ -86,11 +88,7 @@ public final class Log {
/** @see android.util.Log#d(String, String, Throwable) */
public static void d(String tag, String message, @Nullable Throwable throwable) {
if (!logStackTraces) {
d(tag, appendThrowableMessage(message, throwable));
} else if (logLevel == LOG_LEVEL_ALL) {
android.util.Log.d(tag, message, throwable);
}
d(tag, appendThrowableString(message, throwable));
}
/** @see android.util.Log#i(String, String) */
@ -102,11 +100,7 @@ public final class Log {
/** @see android.util.Log#i(String, String, Throwable) */
public static void i(String tag, String message, @Nullable Throwable throwable) {
if (!logStackTraces) {
i(tag, appendThrowableMessage(message, throwable));
} else if (logLevel <= LOG_LEVEL_INFO) {
android.util.Log.i(tag, message, throwable);
}
i(tag, appendThrowableString(message, throwable));
}
/** @see android.util.Log#w(String, String) */
@ -118,11 +112,7 @@ public final class Log {
/** @see android.util.Log#w(String, String, Throwable) */
public static void w(String tag, String message, @Nullable Throwable throwable) {
if (!logStackTraces) {
w(tag, appendThrowableMessage(message, throwable));
} else if (logLevel <= LOG_LEVEL_WARNING) {
android.util.Log.w(tag, message, throwable);
}
w(tag, appendThrowableString(message, throwable));
}
/** @see android.util.Log#e(String, String) */
@ -134,18 +124,54 @@ public final class Log {
/** @see android.util.Log#e(String, String, Throwable) */
public static void e(String tag, String message, @Nullable Throwable throwable) {
if (!logStackTraces) {
e(tag, appendThrowableMessage(message, throwable));
} else if (logLevel <= LOG_LEVEL_ERROR) {
android.util.Log.e(tag, message, throwable);
e(tag, appendThrowableString(message, throwable));
}
/**
* Returns a string representation of a {@link Throwable} suitable for logging, taking into
* account whether {@link #setLogStackTraces(boolean)} stack trace logging} is enabled.
*
* <p>Stack trace logging may be unconditionally suppressed for some expected failure modes (e.g.,
* {@link Throwable Throwables} that are expected if the device doesn't have network connectivity)
* to avoid log spam.
*
* @param throwable The {@link Throwable}.
* @return The string representation of the {@link Throwable}.
*/
@Nullable
public static String getThrowableString(@Nullable Throwable throwable) {
if (throwable == null) {
return null;
} else if (isCausedByUnknownHostException(throwable)) {
// UnknownHostException implies the device doesn't have network connectivity.
// UnknownHostException.getMessage() may return a string that's more verbose than desired for
// logging an expected failure mode. Conversely, android.util.Log.getStackTraceString has
// special handling to return the empty string, which can result in logging that doesn't
// indicate the failure mode at all. Hence we special case this exception to always return a
// concise but useful message.
return "UnknownHostException (no network)";
} else if (!logStackTraces) {
return throwable.getMessage();
} else {
return android.util.Log.getStackTraceString(throwable).trim().replace("\t", " ");
}
}
private static String appendThrowableMessage(String message, @Nullable Throwable throwable) {
if (throwable == null) {
private static String appendThrowableString(String message, @Nullable Throwable throwable) {
@Nullable String throwableString = getThrowableString(throwable);
if (!TextUtils.isEmpty(throwableString)) {
message += "\n " + throwableString.replace("\n", "\n ") + '\n';
}
return message;
}
String throwableMessage = throwable.getMessage();
return TextUtils.isEmpty(throwableMessage) ? message : message + " - " + throwableMessage;
private static boolean isCausedByUnknownHostException(@Nullable Throwable throwable) {
while (throwable != null) {
if (throwable instanceof UnknownHostException) {
return true;
}
throwable = throwable.getCause();
}
return false;
}
}

View File

@ -122,22 +122,22 @@ public final class MimeTypes {
customMimeTypes.add(customMimeType);
}
/** Returns whether the given string is an audio mime type. */
/** Returns whether the given string is an audio MIME type. */
public static boolean isAudio(@Nullable String mimeType) {
return BASE_TYPE_AUDIO.equals(getTopLevelType(mimeType));
}
/** Returns whether the given string is a video mime type. */
/** Returns whether the given string is a video MIME type. */
public static boolean isVideo(@Nullable String mimeType) {
return BASE_TYPE_VIDEO.equals(getTopLevelType(mimeType));
}
/** Returns whether the given string is a text mime type. */
/** Returns whether the given string is a text MIME type. */
public static boolean isText(@Nullable String mimeType) {
return BASE_TYPE_TEXT.equals(getTopLevelType(mimeType));
}
/** Returns whether the given string is an application mime type. */
/** Returns whether the given string is an application MIME type. */
public static boolean isApplication(@Nullable String mimeType) {
return BASE_TYPE_APPLICATION.equals(getTopLevelType(mimeType));
}
@ -173,13 +173,14 @@ public final class MimeTypes {
* @param codecs The codecs attribute.
* @return The derived video mimeType, or null if it could not be derived.
*/
public static @Nullable String getVideoMediaMimeType(@Nullable String codecs) {
@Nullable
public static String getVideoMediaMimeType(@Nullable String codecs) {
if (codecs == null) {
return null;
}
String[] codecList = Util.splitCodecs(codecs);
for (String codec : codecList) {
String mimeType = getMediaMimeType(codec);
@Nullable String mimeType = getMediaMimeType(codec);
if (mimeType != null && isVideo(mimeType)) {
return mimeType;
}
@ -193,13 +194,14 @@ public final class MimeTypes {
* @param codecs The codecs attribute.
* @return The derived audio mimeType, or null if it could not be derived.
*/
public static @Nullable String getAudioMediaMimeType(@Nullable String codecs) {
@Nullable
public static String getAudioMediaMimeType(@Nullable String codecs) {
if (codecs == null) {
return null;
}
String[] codecList = Util.splitCodecs(codecs);
for (String codec : codecList) {
String mimeType = getMediaMimeType(codec);
@Nullable String mimeType = getMediaMimeType(codec);
if (mimeType != null && isAudio(mimeType)) {
return mimeType;
}
@ -213,7 +215,8 @@ public final class MimeTypes {
* @param codec The codec identifier to derive.
* @return The mimeType, or null if it could not be derived.
*/
public static @Nullable String getMediaMimeType(@Nullable String codec) {
@Nullable
public static String getMediaMimeType(@Nullable String codec) {
if (codec == null) {
return null;
}
@ -234,7 +237,7 @@ public final class MimeTypes {
} else if (codec.startsWith("vp8") || codec.startsWith("vp08")) {
return MimeTypes.VIDEO_VP8;
} else if (codec.startsWith("mp4a")) {
String mimeType = null;
@Nullable String mimeType = null;
if (codec.startsWith("mp4a.")) {
String objectTypeString = codec.substring(5); // remove the 'mp4a.' prefix
if (objectTypeString.length() >= 2) {
@ -243,7 +246,7 @@ public final class MimeTypes {
int objectTypeInt = Integer.parseInt(objectTypeHexString, 16);
mimeType = getMimeTypeFromMp4ObjectType(objectTypeInt);
} catch (NumberFormatException ignored) {
// ignored
// Ignored.
}
}
}
@ -266,6 +269,10 @@ public final class MimeTypes {
return MimeTypes.AUDIO_VORBIS;
} else if (codec.startsWith("flac")) {
return MimeTypes.AUDIO_FLAC;
} else if (codec.startsWith("stpp")) {
return MimeTypes.APPLICATION_TTML;
} else if (codec.startsWith("wvtt")) {
return MimeTypes.TEXT_VTT;
} else {
return getCustomMimeTypeForCodec(codec);
}
@ -405,7 +412,8 @@ public final class MimeTypes {
* Returns the top-level type of {@code mimeType}, or null if {@code mimeType} is null or does not
* contain a forward slash character ({@code '/'}).
*/
private static @Nullable String getTopLevelType(@Nullable String mimeType) {
@Nullable
private static String getTopLevelType(@Nullable String mimeType) {
if (mimeType == null) {
return null;
}
@ -416,7 +424,8 @@ public final class MimeTypes {
return mimeType.substring(0, indexOfSlash);
}
private static @Nullable String getCustomMimeTypeForCodec(String codec) {
@Nullable
private static String getCustomMimeTypeForCodec(String codec) {
int customMimeTypeCount = customMimeTypes.size();
for (int i = 0; i < customMimeTypeCount; i++) {
CustomMimeType customMimeType = customMimeTypes.get(i);

View File

@ -1360,13 +1360,15 @@ public final class Util {
}
/**
* Returns whether {@code encoding} is high resolution (&gt; 16-bit) integer PCM.
* Returns whether {@code encoding} is high resolution (&gt; 16-bit) PCM.
*
* @param encoding The encoding of the audio data.
* @return Whether the encoding is high resolution integer PCM.
* @return Whether the encoding is high resolution PCM.
*/
public static boolean isEncodingHighResolutionIntegerPcm(@C.PcmEncoding int encoding) {
return encoding == C.ENCODING_PCM_24BIT || encoding == C.ENCODING_PCM_32BIT;
public static boolean isEncodingHighResolutionPcm(@C.PcmEncoding int encoding) {
return encoding == C.ENCODING_PCM_24BIT
|| encoding == C.ENCODING_PCM_32BIT
|| encoding == C.ENCODING_PCM_FLOAT;
}
/**

View File

@ -1000,6 +1000,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
processOutputFormat(getCodec(), format.width, format.height);
}
maybeNotifyVideoSizeChanged();
decoderCounters.renderedOutputBufferCount++;
maybeNotifyRenderedFirstFrame();
onProcessedOutputBuffer(presentationTimeUs);
}

View File

@ -8,5 +8,5 @@ This is the second subtitle.
Second subtitle with second line.
3
00:00:04,567 --> 00:00:08,901
02:00:04,567 --> 02:00:08,901
This is the third subtitle.

View File

@ -9,5 +9,5 @@ This is the second subtitle.
Second subtitle with second line.
3
00:00:04,567 --> 00:00:08,901
02:00:04,567 --> 02:00:08,901
This is the third subtitle.

View File

@ -7,5 +7,5 @@ This is the second subtitle.
Second subtitle with second line.
3
00:00:04,567 --> 00:00:08,901
02:00:04,567 --> 02:00:08,901
This is the third subtitle.

View File

@ -7,13 +7,13 @@ This is the second subtitle.
Second subtitle with second line.
3
00:00:04,567 --> 00:00:08,901
02:00:04,567 --> 02:00:08,901
This is the third subtitle.
4
--> 00:00:10,901
--> 02:00:10,901
This is the fourth subtitle.
5
00:00:12,901 -->
02:00:12,901 -->
This is the fifth subtitle.

View File

@ -8,5 +8,5 @@ This is the second subtitle.
Second subtitle with second line.
3
00:00:04,567 --> 00:00:08,901
02:00:04,567 --> 02:00:08,901
This is the third subtitle.

View File

@ -0,0 +1,12 @@
1
00:00,000 --> 00:01,234
This is the first subtitle.
2
00:00:02 --> 00:00:03
This is the second subtitle.
Second subtitle with second line.
3
02:00:04,567 --> 02:00:08,901
This is the third subtitle.

View File

@ -8,5 +8,5 @@ This is the second subtitle.
Second subtitle with second line.
3
00:00:04,567 --> 00:00:08,901
02:00:04,567 --> 02:00:08,901
This is the third subtitle.

Binary file not shown.

View File

@ -0,0 +1,641 @@
seekMap:
isSeekable = false
duration = UNSET TIME
getPosition(0) = [[timeUs=0, position=0]]
numberOfTracks = 2
track 0:
format:
bitrate = -1
id = 0
containerMimeType = null
sampleMimeType = audio/mp4a-latm
maxInputSize = -1
width = -1
height = -1
frameRate = -1.0
rotationDegrees = 0
pixelWidthHeightRatio = 1.0
channelCount = 1
sampleRate = 44100
pcmEncoding = -1
encoderDelay = 0
encoderPadding = 0
subsampleOffsetUs = 9223372036854775807
selectionFlags = 0
language = null
drmInitData = -
metadata = null
initializationData:
data = length 2, hash 5F7
total output bytes = 30797
sample count = 144
sample 0:
time = 0
flags = 1
data = length 23, hash 47DE9131
sample 1:
time = 23219
flags = 1
data = length 6, hash 31CF3A46
sample 2:
time = 46438
flags = 1
data = length 6, hash 31CF3A46
sample 3:
time = 69657
flags = 1
data = length 6, hash 31CF3A46
sample 4:
time = 92876
flags = 1
data = length 6, hash 31EC5206
sample 5:
time = 116095
flags = 1
data = length 171, hash 4F6478F6
sample 6:
time = 139314
flags = 1
data = length 202, hash AF4068A3
sample 7:
time = 162533
flags = 1
data = length 210, hash E4C10618
sample 8:
time = 185752
flags = 1
data = length 217, hash 9ECCD0D9
sample 9:
time = 208971
flags = 1
data = length 212, hash 6BAC2CD9
sample 10:
time = 232190
flags = 1
data = length 223, hash 188B6010
sample 11:
time = 255409
flags = 1
data = length 222, hash C1A04D0C
sample 12:
time = 278628
flags = 1
data = length 220, hash D65F9768
sample 13:
time = 301847
flags = 1
data = length 227, hash B96C9E14
sample 14:
time = 325066
flags = 1
data = length 229, hash 9FB09972
sample 15:
time = 348285
flags = 1
data = length 220, hash 2271F053
sample 16:
time = 371504
flags = 1
data = length 226, hash 5EDD2F4F
sample 17:
time = 394723
flags = 1
data = length 239, hash 957510E0
sample 18:
time = 417942
flags = 1
data = length 224, hash 718A8F47
sample 19:
time = 441161
flags = 1
data = length 225, hash 5E11E293
sample 20:
time = 464380
flags = 1
data = length 227, hash FCE50D27
sample 21:
time = 487599
flags = 1
data = length 212, hash 77908C40
sample 22:
time = 510818
flags = 1
data = length 227, hash 34C4EB32
sample 23:
time = 534037
flags = 1
data = length 231, hash 95488307
sample 24:
time = 557256
flags = 1
data = length 226, hash 97F12D6F
sample 25:
time = 580475
flags = 1
data = length 236, hash 91A9D9A2
sample 26:
time = 603694
flags = 1
data = length 227, hash 27A608F9
sample 27:
time = 626913
flags = 1
data = length 229, hash 57DAAE4
sample 28:
time = 650132
flags = 1
data = length 235, hash ED30AC34
sample 29:
time = 673351
flags = 1
data = length 227, hash BD3D6280
sample 30:
time = 696570
flags = 1
data = length 233, hash 694B1087
sample 31:
time = 719789
flags = 1
data = length 232, hash 1EDFE047
sample 32:
time = 743008
flags = 1
data = length 228, hash E2A831F4
sample 33:
time = 766227
flags = 1
data = length 231, hash 757E6012
sample 34:
time = 789446
flags = 1
data = length 223, hash 4003D791
sample 35:
time = 812665
flags = 1
data = length 232, hash 3CF9A07C
sample 36:
time = 835884
flags = 1
data = length 228, hash 25AC3FF7
sample 37:
time = 859103
flags = 1
data = length 220, hash 2C1824CE
sample 38:
time = 882322
flags = 1
data = length 229, hash 46FDD8FB
sample 39:
time = 905541
flags = 1
data = length 237, hash F6988018
sample 40:
time = 928760
flags = 1
data = length 242, hash 60436B6B
sample 41:
time = 951979
flags = 1
data = length 275, hash 90EDFA8E
sample 42:
time = 975198
flags = 1
data = length 242, hash 5C86EFCB
sample 43:
time = 998417
flags = 1
data = length 233, hash E0A51B82
sample 44:
time = 1021636
flags = 1
data = length 235, hash 590DF14F
sample 45:
time = 1044855
flags = 1
data = length 238, hash 69AF4E6E
sample 46:
time = 1068074
flags = 1
data = length 235, hash E745AE8D
sample 47:
time = 1091293
flags = 1
data = length 223, hash 295F2A13
sample 48:
time = 1114512
flags = 1
data = length 228, hash E2F47B21
sample 49:
time = 1137731
flags = 1
data = length 229, hash 262C3CFE
sample 50:
time = 1160950
flags = 1
data = length 232, hash 4B5BF5E8
sample 51:
time = 1184169
flags = 1
data = length 233, hash F3D80836
sample 52:
time = 1207388
flags = 1
data = length 237, hash 32E0A11E
sample 53:
time = 1230607
flags = 1
data = length 228, hash E1B89F13
sample 54:
time = 1253826
flags = 1
data = length 237, hash 8BDD9E38
sample 55:
time = 1277045
flags = 1
data = length 235, hash 3C84161F
sample 56:
time = 1300264
flags = 1
data = length 227, hash A47E1789
sample 57:
time = 1323483
flags = 1
data = length 228, hash 869FDFD3
sample 58:
time = 1346702
flags = 1
data = length 233, hash 272ECE2
sample 59:
time = 1369921
flags = 1
data = length 227, hash DB6B9618
sample 60:
time = 1393140
flags = 1
data = length 212, hash 63214325
sample 61:
time = 1416359
flags = 1
data = length 221, hash 9BA588A1
sample 62:
time = 1439578
flags = 1
data = length 225, hash 21EFD50C
sample 63:
time = 1462797
flags = 1
data = length 231, hash F3AD0BF
sample 64:
time = 1486016
flags = 1
data = length 224, hash 822C9210
sample 65:
time = 1509235
flags = 1
data = length 195, hash D4EF53EE
sample 66:
time = 1532454
flags = 1
data = length 195, hash A816647A
sample 67:
time = 1555673
flags = 1
data = length 184, hash 9A2B7E6
sample 68:
time = 1578892
flags = 1
data = length 210, hash 956E3600
sample 69:
time = 1602111
flags = 1
data = length 234, hash 35CFDA0A
sample 70:
time = 1625330
flags = 1
data = length 239, hash 9E15AC1E
sample 71:
time = 1648549
flags = 1
data = length 228, hash F3B70641
sample 72:
time = 1671768
flags = 1
data = length 237, hash 124E3194
sample 73:
time = 1694987
flags = 1
data = length 231, hash 950CD7C8
sample 74:
time = 1718206
flags = 1
data = length 236, hash A12E49AF
sample 75:
time = 1741425
flags = 1
data = length 242, hash 43BC9C24
sample 76:
time = 1764644
flags = 1
data = length 241, hash DCF0B17
sample 77:
time = 1787863
flags = 1
data = length 251, hash C0B99968
sample 78:
time = 1811082
flags = 1
data = length 245, hash 9B38ED1C
sample 79:
time = 1834301
flags = 1
data = length 238, hash 1BA69079
sample 80:
time = 1857520
flags = 1
data = length 233, hash 44C8C6BF
sample 81:
time = 1880739
flags = 1
data = length 231, hash EABBEE02
sample 82:
time = 1903958
flags = 1
data = length 226, hash D09C44FB
sample 83:
time = 1927177
flags = 1
data = length 235, hash BE6A6608
sample 84:
time = 1950396
flags = 1
data = length 235, hash 2735F454
sample 85:
time = 1973615
flags = 1
data = length 238, hash B160DFE7
sample 86:
time = 1996834
flags = 1
data = length 232, hash 1B217D2E
sample 87:
time = 2020053
flags = 1
data = length 251, hash D1C14CEA
sample 88:
time = 2043272
flags = 1
data = length 256, hash 97C87F08
sample 89:
time = 2066491
flags = 1
data = length 237, hash 6645DB3
sample 90:
time = 2089710
flags = 1
data = length 235, hash 727A1C82
sample 91:
time = 2112929
flags = 1
data = length 234, hash 5015F8B5
sample 92:
time = 2136148
flags = 1
data = length 241, hash 9102144B
sample 93:
time = 2159367
flags = 1
data = length 224, hash 64E0D807
sample 94:
time = 2182586
flags = 1
data = length 228, hash 1922B852
sample 95:
time = 2205805
flags = 1
data = length 224, hash 953502D8
sample 96:
time = 2229024
flags = 1
data = length 214, hash 92B87FE7
sample 97:
time = 2252243
flags = 1
data = length 213, hash BB0C8D86
sample 98:
time = 2275462
flags = 1
data = length 206, hash 9AD21017
sample 99:
time = 2298681
flags = 1
data = length 209, hash C479FE94
sample 100:
time = 2321900
flags = 1
data = length 220, hash 3033DCE1
sample 101:
time = 2345119
flags = 1
data = length 217, hash 7D589C94
sample 102:
time = 2368338
flags = 1
data = length 216, hash AAF6C183
sample 103:
time = 2391557
flags = 1
data = length 206, hash 1EE1207F
sample 104:
time = 2414776
flags = 1
data = length 204, hash 4BEB1210
sample 105:
time = 2437995
flags = 1
data = length 213, hash 21A841C9
sample 106:
time = 2461214
flags = 1
data = length 207, hash B80B0424
sample 107:
time = 2484433
flags = 1
data = length 212, hash 4785A1C3
sample 108:
time = 2507652
flags = 1
data = length 205, hash 59BF7229
sample 109:
time = 2530871
flags = 1
data = length 208, hash FA313DDE
sample 110:
time = 2554090
flags = 1
data = length 211, hash 190D85FD
sample 111:
time = 2577309
flags = 1
data = length 211, hash BA050052
sample 112:
time = 2600528
flags = 1
data = length 211, hash F3080F10
sample 113:
time = 2623747
flags = 1
data = length 210, hash F41B7BE7
sample 114:
time = 2646966
flags = 1
data = length 207, hash 2176C97E
sample 115:
time = 2670185
flags = 1
data = length 220, hash 32087455
sample 116:
time = 2693404
flags = 1
data = length 213, hash 4E5649A8
sample 117:
time = 2716623
flags = 1
data = length 213, hash 5F12FDCF
sample 118:
time = 2739842
flags = 1
data = length 204, hash 1E895C2A
sample 119:
time = 2763061
flags = 1
data = length 219, hash 45382270
sample 120:
time = 2786280
flags = 1
data = length 205, hash D66C6A1D
sample 121:
time = 2809499
flags = 1
data = length 204, hash 467AD01F
sample 122:
time = 2832718
flags = 1
data = length 211, hash F0435574
sample 123:
time = 2855937
flags = 1
data = length 206, hash 8C96B75F
sample 124:
time = 2879156
flags = 1
data = length 200, hash 82553248
sample 125:
time = 2902375
flags = 1
data = length 180, hash 1E51E6CE
sample 126:
time = 2925594
flags = 1
data = length 196, hash 33151DC4
sample 127:
time = 2948813
flags = 1
data = length 197, hash 1E62A7D6
sample 128:
time = 2972032
flags = 1
data = length 206, hash 6A6C4CC9
sample 129:
time = 2995251
flags = 1
data = length 209, hash A72FABAA
sample 130:
time = 3018470
flags = 1
data = length 217, hash BA33B985
sample 131:
time = 3041689
flags = 1
data = length 235, hash 9919CFD9
sample 132:
time = 3064908
flags = 1
data = length 236, hash A22C7267
sample 133:
time = 3088127
flags = 1
data = length 213, hash 3D57C901
sample 134:
time = 3111346
flags = 1
data = length 205, hash 47F68FDE
sample 135:
time = 3134565
flags = 1
data = length 210, hash 9A756E9C
sample 136:
time = 3157784
flags = 1
data = length 210, hash BD45C31F
sample 137:
time = 3181003
flags = 1
data = length 207, hash 8774FF7B
sample 138:
time = 3204222
flags = 1
data = length 149, hash 4678C0E5
sample 139:
time = 3227441
flags = 1
data = length 161, hash E991035D
sample 140:
time = 3250660
flags = 1
data = length 197, hash C3013689
sample 141:
time = 3273879
flags = 1
data = length 208, hash E6C0237
sample 142:
time = 3297098
flags = 1
data = length 232, hash A330F188
sample 143:
time = 3320317
flags = 1
data = length 174, hash 2B69C34E
track 1:
format:
bitrate = -1
id = 1
containerMimeType = null
sampleMimeType = application/id3
maxInputSize = -1
width = -1
height = -1
frameRate = -1.0
rotationDegrees = 0
pixelWidthHeightRatio = 1.0
channelCount = -1
sampleRate = -1
pcmEncoding = -1
encoderDelay = 0
encoderPadding = 0
subsampleOffsetUs = 9223372036854775807
selectionFlags = 0
language = null
drmInitData = -
metadata = null
initializationData:
total output bytes = 141
sample count = 2
sample 0:
time = 0
flags = 1
data = length 55, hash A7EB51A0
sample 1:
time = 23219
flags = 1
data = length 86, hash 3FA72D40
tracksEnded = true

View File

@ -0,0 +1,641 @@
seekMap:
isSeekable = false
duration = UNSET TIME
getPosition(0) = [[timeUs=0, position=0]]
numberOfTracks = 2
track 0:
format:
bitrate = -1
id = 0
containerMimeType = null
sampleMimeType = audio/mp4a-latm
maxInputSize = -1
width = -1
height = -1
frameRate = -1.0
rotationDegrees = 0
pixelWidthHeightRatio = 1.0
channelCount = 1
sampleRate = 44100
pcmEncoding = -1
encoderDelay = 0
encoderPadding = 0
subsampleOffsetUs = 9223372036854775807
selectionFlags = 0
language = null
drmInitData = -
metadata = null
initializationData:
data = length 2, hash 5F7
total output bytes = 30797
sample count = 144
sample 0:
time = 0
flags = 1
data = length 23, hash 47DE9131
sample 1:
time = 23219
flags = 1
data = length 6, hash 31CF3A46
sample 2:
time = 46438
flags = 1
data = length 6, hash 31CF3A46
sample 3:
time = 69657
flags = 1
data = length 6, hash 31CF3A46
sample 4:
time = 92876
flags = 1
data = length 6, hash 31EC5206
sample 5:
time = 116095
flags = 1
data = length 171, hash 4F6478F6
sample 6:
time = 139314
flags = 1
data = length 202, hash AF4068A3
sample 7:
time = 162533
flags = 1
data = length 210, hash E4C10618
sample 8:
time = 185752
flags = 1
data = length 217, hash 9ECCD0D9
sample 9:
time = 208971
flags = 1
data = length 212, hash 6BAC2CD9
sample 10:
time = 232190
flags = 1
data = length 223, hash 188B6010
sample 11:
time = 255409
flags = 1
data = length 222, hash C1A04D0C
sample 12:
time = 278628
flags = 1
data = length 220, hash D65F9768
sample 13:
time = 301847
flags = 1
data = length 227, hash B96C9E14
sample 14:
time = 325066
flags = 1
data = length 229, hash 9FB09972
sample 15:
time = 348285
flags = 1
data = length 220, hash 2271F053
sample 16:
time = 371504
flags = 1
data = length 226, hash 5EDD2F4F
sample 17:
time = 394723
flags = 1
data = length 239, hash 957510E0
sample 18:
time = 417942
flags = 1
data = length 224, hash 718A8F47
sample 19:
time = 441161
flags = 1
data = length 225, hash 5E11E293
sample 20:
time = 464380
flags = 1
data = length 227, hash FCE50D27
sample 21:
time = 487599
flags = 1
data = length 212, hash 77908C40
sample 22:
time = 510818
flags = 1
data = length 227, hash 34C4EB32
sample 23:
time = 534037
flags = 1
data = length 231, hash 95488307
sample 24:
time = 557256
flags = 1
data = length 226, hash 97F12D6F
sample 25:
time = 580475
flags = 1
data = length 236, hash 91A9D9A2
sample 26:
time = 603694
flags = 1
data = length 227, hash 27A608F9
sample 27:
time = 626913
flags = 1
data = length 229, hash 57DAAE4
sample 28:
time = 650132
flags = 1
data = length 235, hash ED30AC34
sample 29:
time = 673351
flags = 1
data = length 227, hash BD3D6280
sample 30:
time = 696570
flags = 1
data = length 233, hash 694B1087
sample 31:
time = 719789
flags = 1
data = length 232, hash 1EDFE047
sample 32:
time = 743008
flags = 1
data = length 228, hash E2A831F4
sample 33:
time = 766227
flags = 1
data = length 231, hash 757E6012
sample 34:
time = 789446
flags = 1
data = length 223, hash 4003D791
sample 35:
time = 812665
flags = 1
data = length 232, hash 3CF9A07C
sample 36:
time = 835884
flags = 1
data = length 228, hash 25AC3FF7
sample 37:
time = 859103
flags = 1
data = length 220, hash 2C1824CE
sample 38:
time = 882322
flags = 1
data = length 229, hash 46FDD8FB
sample 39:
time = 905541
flags = 1
data = length 237, hash F6988018
sample 40:
time = 928760
flags = 1
data = length 242, hash 60436B6B
sample 41:
time = 951979
flags = 1
data = length 275, hash 90EDFA8E
sample 42:
time = 975198
flags = 1
data = length 242, hash 5C86EFCB
sample 43:
time = 998417
flags = 1
data = length 233, hash E0A51B82
sample 44:
time = 1021636
flags = 1
data = length 235, hash 590DF14F
sample 45:
time = 1044855
flags = 1
data = length 238, hash 69AF4E6E
sample 46:
time = 1068074
flags = 1
data = length 235, hash E745AE8D
sample 47:
time = 1091293
flags = 1
data = length 223, hash 295F2A13
sample 48:
time = 1114512
flags = 1
data = length 228, hash E2F47B21
sample 49:
time = 1137731
flags = 1
data = length 229, hash 262C3CFE
sample 50:
time = 1160950
flags = 1
data = length 232, hash 4B5BF5E8
sample 51:
time = 1184169
flags = 1
data = length 233, hash F3D80836
sample 52:
time = 1207388
flags = 1
data = length 237, hash 32E0A11E
sample 53:
time = 1230607
flags = 1
data = length 228, hash E1B89F13
sample 54:
time = 1253826
flags = 1
data = length 237, hash 8BDD9E38
sample 55:
time = 1277045
flags = 1
data = length 235, hash 3C84161F
sample 56:
time = 1300264
flags = 1
data = length 227, hash A47E1789
sample 57:
time = 1323483
flags = 1
data = length 228, hash 869FDFD3
sample 58:
time = 1346702
flags = 1
data = length 233, hash 272ECE2
sample 59:
time = 1369921
flags = 1
data = length 227, hash DB6B9618
sample 60:
time = 1393140
flags = 1
data = length 212, hash 63214325
sample 61:
time = 1416359
flags = 1
data = length 221, hash 9BA588A1
sample 62:
time = 1439578
flags = 1
data = length 225, hash 21EFD50C
sample 63:
time = 1462797
flags = 1
data = length 231, hash F3AD0BF
sample 64:
time = 1486016
flags = 1
data = length 224, hash 822C9210
sample 65:
time = 1509235
flags = 1
data = length 195, hash D4EF53EE
sample 66:
time = 1532454
flags = 1
data = length 195, hash A816647A
sample 67:
time = 1555673
flags = 1
data = length 184, hash 9A2B7E6
sample 68:
time = 1578892
flags = 1
data = length 210, hash 956E3600
sample 69:
time = 1602111
flags = 1
data = length 234, hash 35CFDA0A
sample 70:
time = 1625330
flags = 1
data = length 239, hash 9E15AC1E
sample 71:
time = 1648549
flags = 1
data = length 228, hash F3B70641
sample 72:
time = 1671768
flags = 1
data = length 237, hash 124E3194
sample 73:
time = 1694987
flags = 1
data = length 231, hash 950CD7C8
sample 74:
time = 1718206
flags = 1
data = length 236, hash A12E49AF
sample 75:
time = 1741425
flags = 1
data = length 242, hash 43BC9C24
sample 76:
time = 1764644
flags = 1
data = length 241, hash DCF0B17
sample 77:
time = 1787863
flags = 1
data = length 251, hash C0B99968
sample 78:
time = 1811082
flags = 1
data = length 245, hash 9B38ED1C
sample 79:
time = 1834301
flags = 1
data = length 238, hash 1BA69079
sample 80:
time = 1857520
flags = 1
data = length 233, hash 44C8C6BF
sample 81:
time = 1880739
flags = 1
data = length 231, hash EABBEE02
sample 82:
time = 1903958
flags = 1
data = length 226, hash D09C44FB
sample 83:
time = 1927177
flags = 1
data = length 235, hash BE6A6608
sample 84:
time = 1950396
flags = 1
data = length 235, hash 2735F454
sample 85:
time = 1973615
flags = 1
data = length 238, hash B160DFE7
sample 86:
time = 1996834
flags = 1
data = length 232, hash 1B217D2E
sample 87:
time = 2020053
flags = 1
data = length 251, hash D1C14CEA
sample 88:
time = 2043272
flags = 1
data = length 256, hash 97C87F08
sample 89:
time = 2066491
flags = 1
data = length 237, hash 6645DB3
sample 90:
time = 2089710
flags = 1
data = length 235, hash 727A1C82
sample 91:
time = 2112929
flags = 1
data = length 234, hash 5015F8B5
sample 92:
time = 2136148
flags = 1
data = length 241, hash 9102144B
sample 93:
time = 2159367
flags = 1
data = length 224, hash 64E0D807
sample 94:
time = 2182586
flags = 1
data = length 228, hash 1922B852
sample 95:
time = 2205805
flags = 1
data = length 224, hash 953502D8
sample 96:
time = 2229024
flags = 1
data = length 214, hash 92B87FE7
sample 97:
time = 2252243
flags = 1
data = length 213, hash BB0C8D86
sample 98:
time = 2275462
flags = 1
data = length 206, hash 9AD21017
sample 99:
time = 2298681
flags = 1
data = length 209, hash C479FE94
sample 100:
time = 2321900
flags = 1
data = length 220, hash 3033DCE1
sample 101:
time = 2345119
flags = 1
data = length 217, hash 7D589C94
sample 102:
time = 2368338
flags = 1
data = length 216, hash AAF6C183
sample 103:
time = 2391557
flags = 1
data = length 206, hash 1EE1207F
sample 104:
time = 2414776
flags = 1
data = length 204, hash 4BEB1210
sample 105:
time = 2437995
flags = 1
data = length 213, hash 21A841C9
sample 106:
time = 2461214
flags = 1
data = length 207, hash B80B0424
sample 107:
time = 2484433
flags = 1
data = length 212, hash 4785A1C3
sample 108:
time = 2507652
flags = 1
data = length 205, hash 59BF7229
sample 109:
time = 2530871
flags = 1
data = length 208, hash FA313DDE
sample 110:
time = 2554090
flags = 1
data = length 211, hash 190D85FD
sample 111:
time = 2577309
flags = 1
data = length 211, hash BA050052
sample 112:
time = 2600528
flags = 1
data = length 211, hash F3080F10
sample 113:
time = 2623747
flags = 1
data = length 210, hash F41B7BE7
sample 114:
time = 2646966
flags = 1
data = length 207, hash 2176C97E
sample 115:
time = 2670185
flags = 1
data = length 220, hash 32087455
sample 116:
time = 2693404
flags = 1
data = length 213, hash 4E5649A8
sample 117:
time = 2716623
flags = 1
data = length 213, hash 5F12FDCF
sample 118:
time = 2739842
flags = 1
data = length 204, hash 1E895C2A
sample 119:
time = 2763061
flags = 1
data = length 219, hash 45382270
sample 120:
time = 2786280
flags = 1
data = length 205, hash D66C6A1D
sample 121:
time = 2809499
flags = 1
data = length 204, hash 467AD01F
sample 122:
time = 2832718
flags = 1
data = length 211, hash F0435574
sample 123:
time = 2855937
flags = 1
data = length 206, hash 8C96B75F
sample 124:
time = 2879156
flags = 1
data = length 200, hash 82553248
sample 125:
time = 2902375
flags = 1
data = length 180, hash 1E51E6CE
sample 126:
time = 2925594
flags = 1
data = length 196, hash 33151DC4
sample 127:
time = 2948813
flags = 1
data = length 197, hash 1E62A7D6
sample 128:
time = 2972032
flags = 1
data = length 206, hash 6A6C4CC9
sample 129:
time = 2995251
flags = 1
data = length 209, hash A72FABAA
sample 130:
time = 3018470
flags = 1
data = length 217, hash BA33B985
sample 131:
time = 3041689
flags = 1
data = length 235, hash 9919CFD9
sample 132:
time = 3064908
flags = 1
data = length 236, hash A22C7267
sample 133:
time = 3088127
flags = 1
data = length 213, hash 3D57C901
sample 134:
time = 3111346
flags = 1
data = length 205, hash 47F68FDE
sample 135:
time = 3134565
flags = 1
data = length 210, hash 9A756E9C
sample 136:
time = 3157784
flags = 1
data = length 210, hash BD45C31F
sample 137:
time = 3181003
flags = 1
data = length 207, hash 8774FF7B
sample 138:
time = 3204222
flags = 1
data = length 149, hash 4678C0E5
sample 139:
time = 3227441
flags = 1
data = length 161, hash E991035D
sample 140:
time = 3250660
flags = 1
data = length 197, hash C3013689
sample 141:
time = 3273879
flags = 1
data = length 208, hash E6C0237
sample 142:
time = 3297098
flags = 1
data = length 232, hash A330F188
sample 143:
time = 3320317
flags = 1
data = length 174, hash 2B69C34E
track 1:
format:
bitrate = -1
id = 1
containerMimeType = null
sampleMimeType = application/id3
maxInputSize = -1
width = -1
height = -1
frameRate = -1.0
rotationDegrees = 0
pixelWidthHeightRatio = 1.0
channelCount = -1
sampleRate = -1
pcmEncoding = -1
encoderDelay = 0
encoderPadding = 0
subsampleOffsetUs = 9223372036854775807
selectionFlags = 0
language = null
drmInitData = -
metadata = null
initializationData:
total output bytes = 141
sample count = 2
sample 0:
time = 0
flags = 1
data = length 55, hash A7EB51A0
sample 1:
time = 23219
flags = 1
data = length 86, hash 3FA72D40
tracksEnded = true

View File

@ -65,13 +65,11 @@ public class AudioFocusManagerTest {
@Test
public void setAudioAttributes_withNullUsage_doesNotManageAudioFocus() {
// Ensure that NULL audio attributes -> don't manage audio focus
assertThat(
audioFocusManager.setAudioAttributes(
/* audioAttributes= */ null, /* playWhenReady= */ false, Player.STATE_IDLE))
audioFocusManager.setAudioAttributes(/* audioAttributes= */ null);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ false, Player.STATE_IDLE))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(
audioFocusManager.setAudioAttributes(
/* audioAttributes= */ null, /* playWhenReady= */ true, Player.STATE_READY))
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
@ -85,18 +83,17 @@ public class AudioFocusManagerTest {
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(request.durationHint).isEqualTo(AudioManager.AUDIOFOCUS_GAIN);
// Ensure that setting null audio attributes with audio focus releases audio focus.
assertThat(
audioFocusManager.setAudioAttributes(
/* audioAttributes= */ null, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(/* audioAttributes= */ null);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
AudioManager.OnAudioFocusChangeListener lastRequest =
Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener();
@ -110,18 +107,16 @@ public class AudioFocusManagerTest {
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(getAudioFocusGainFromRequest(request)).isEqualTo(AudioManager.AUDIOFOCUS_GAIN);
// Ensure that setting null audio attributes with audio focus releases audio focus.
assertThat(
audioFocusManager.setAudioAttributes(
/* audioAttributes= */ null, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(/* audioAttributes= */ null);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
AudioFocusRequest lastRequest =
Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest();
@ -130,10 +125,10 @@ public class AudioFocusManagerTest {
@Test
public void setAudioAttributes_withUsageAlarm_throwsIllegalArgumentException() {
// Ensure that audio attributes that map to AUDIOFOCUS_GAIN_TRANSIENT* throw
// Ensure that audio attributes that map to AUDIOFOCUS_GAIN_TRANSIENT* throw.
AudioAttributes alarm = new AudioAttributes.Builder().setUsage(C.USAGE_ALARM).build();
try {
audioFocusManager.setAudioAttributes(alarm, /* playWhenReady= */ false, Player.STATE_IDLE);
audioFocusManager.setAudioAttributes(alarm);
fail();
} catch (IllegalArgumentException e) {
// Expected
@ -147,9 +142,9 @@ public class AudioFocusManagerTest {
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
@ -163,9 +158,9 @@ public class AudioFocusManagerTest {
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_ENDED))
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_ENDED))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
@ -173,42 +168,221 @@ public class AudioFocusManagerTest {
}
@Test
public void handlePrepare_afterSetAudioAttributes_setsPlayerCommandPlayWhenReady() {
public void updateAudioFocus_idleToBuffering_setsPlayerCommandPlayWhenReady() {
// Ensure that when playWhenReady is true while the player is IDLE, audio focus is only
// requested after calling handlePrepare.
// requested after calling prepare (= changing the state to BUFFERING).
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(media);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_IDLE))
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_IDLE))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(Shadows.shadowOf(audioManager).getLastAudioFocusRequest()).isNull();
assertThat(audioFocusManager.handlePrepare(/* playWhenReady= */ true))
assertThat(
audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_BUFFERING))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(getAudioFocusGainFromRequest(request)).isEqualTo(AudioManager.AUDIOFOCUS_GAIN);
}
@Test
public void updateAudioFocus_pausedToPlaying_setsPlayerCommandPlayWhenReady() {
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(media);
// Audio focus should not be requested yet, because playWhenReady=false.
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ false, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAudioFocusRequest()).isNull();
// Audio focus should be requested now that playWhenReady=true.
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(getAudioFocusGainFromRequest(request)).isEqualTo(AudioManager.AUDIOFOCUS_GAIN);
}
// See https://github.com/google/ExoPlayer/issues/7182 for context.
@Test
public void updateAudioFocus_pausedToPlaying_withTransientLoss_setsPlayerCommandPlayWhenReady() {
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
// Simulate transient focus loss.
audioFocusManager.getFocusListener().onAudioFocusChange(AudioManager.AUDIOFOCUS_LOSS_TRANSIENT);
// Focus should be re-requested, rather than staying in a state of transient focus loss.
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
}
@Test
public void handleSetPlayWhenReady_afterSetAudioAttributes_setsPlayerCommandPlayWhenReady() {
// Ensure that audio focus is not requested until playWhenReady is true.
public void updateAudioFocus_pausedToPlaying_withTransientDuck_setsPlayerCommandPlayWhenReady() {
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.handlePrepare(/* playWhenReady= */ false))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAudioFocusRequest()).isNull();
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ false, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAudioFocusRequest()).isNull();
assertThat(
audioFocusManager.handleSetPlayWhenReady(/* playWhenReady= */ true, Player.STATE_READY))
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
// Simulate transient ducking.
audioFocusManager
.getFocusListener()
.onAudioFocusChange(AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK);
assertThat(testPlayerControl.lastVolumeMultiplier).isLessThan(1.0f);
// Focus should be re-requested, rather than staying in a state of transient ducking. This
// should restore the volume to 1.0.
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(testPlayerControl.lastVolumeMultiplier).isEqualTo(1.0f);
}
@Test
public void updateAudioFocus_abandonFocusWhenDucked_restoresFullVolume() {
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
// Simulate transient ducking.
audioFocusManager
.getFocusListener()
.onAudioFocusChange(AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK);
assertThat(testPlayerControl.lastVolumeMultiplier).isLessThan(1.0f);
// Configure the manager to no longer handle audio focus.
audioFocusManager.setAudioAttributes(null);
// Focus should be abandoned, which should restore the volume to 1.0.
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(testPlayerControl.lastVolumeMultiplier).isEqualTo(1.0f);
}
@Test
@Config(maxSdk = 25)
public void updateAudioFocus_readyToIdle_abandonsAudioFocus() {
// Ensure that stopping the player (=changing state to idle) abandons audio focus.
AudioAttributes media =
new AudioAttributes.Builder()
.setUsage(C.USAGE_MEDIA)
.setContentType(C.CONTENT_TYPE_SPEECH)
.build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener()).isNull();
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_IDLE))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener())
.isEqualTo(request.listener);
}
@Test
@Config(minSdk = 26, maxSdk = TARGET_SDK)
public void updateAudioFocus_readyToIdle_abandonsAudioFocus_v26() {
// Ensure that stopping the player (=changing state to idle) abandons audio focus.
AudioAttributes media =
new AudioAttributes.Builder()
.setUsage(C.USAGE_MEDIA)
.setContentType(C.CONTENT_TYPE_SPEECH)
.build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest()).isNull();
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_IDLE))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest())
.isEqualTo(request.audioFocusRequest);
}
@Test
@Config(maxSdk = 25)
public void updateAudioFocus_readyToIdle_withoutHandlingAudioFocus_isNoOp() {
// Ensure that changing state to idle is a no-op if audio focus isn't handled.
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(null);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ false, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener()).isNull();
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(request).isNull();
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ false, Player.STATE_IDLE))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener()).isNull();
}
@Test
@Config(minSdk = 26, maxSdk = TARGET_SDK)
public void updateAudioFocus_readyToIdle_withoutHandlingAudioFocus_isNoOp_v26() {
// Ensure that changing state to idle is a no-op if audio focus isn't handled.
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(null);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ false, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest()).isNull();
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(request).isNull();
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ false, Player.STATE_IDLE))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest()).isNull();
}
@Test
public void release_doesNotCallPlayerControlToRestoreVolume() {
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
// Simulate transient ducking.
audioFocusManager
.getFocusListener()
.onAudioFocusChange(AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK);
assertThat(testPlayerControl.lastVolumeMultiplier).isLessThan(1.0f);
audioFocusManager.release();
// PlaybackController.setVolumeMultiplier should not have been called to restore the volume.
assertThat(testPlayerControl.lastVolumeMultiplier).isLessThan(1.0f);
}
@Test
@ -217,17 +391,17 @@ public class AudioFocusManagerTest {
// AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK, and returns to the default value after focus is
// regained.
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
audioFocusManager
.getFocusListener()
.onAudioFocusChange(AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK);
assertThat(testPlayerControl.lastVolumeMultiplier).isLessThan(1.0f);
assertThat(testPlayerControl.lastPlayerCommand).isEqualTo(NO_COMMAND_RECEIVED);
audioFocusManager.getFocusListener().onAudioFocusChange(AudioManager.AUDIOFOCUS_GAIN);
@ -243,12 +417,11 @@ public class AudioFocusManagerTest {
.setUsage(C.USAGE_MEDIA)
.setContentType(C.CONTENT_TYPE_SPEECH)
.build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
audioFocusManager
@ -261,16 +434,15 @@ public class AudioFocusManagerTest {
}
@Test
public void onAudioFocusChange_withTransientLost_sendsCommandWaitForCallback() {
public void onAudioFocusChange_withTransientLoss_sendsCommandWaitForCallback() {
// Ensure that the player is commanded to pause when audio focus is lost with
// AUDIOFOCUS_LOSS_TRANSIENT.
AudioAttributes media = new AudioAttributes.Builder().setUsage(C.USAGE_MEDIA).build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
audioFocusManager.getFocusListener().onAudioFocusChange(AudioManager.AUDIOFOCUS_LOSS_TRANSIENT);
@ -280,7 +452,7 @@ public class AudioFocusManagerTest {
@Test
@Config(maxSdk = 25)
public void onAudioFocusChange_withAudioFocusLost_sendsDoNotPlayAndAbandondsFocus() {
public void onAudioFocusChange_withFocusLoss_sendsDoNotPlayAndAbandonsFocus() {
// Ensure that AUDIOFOCUS_LOSS causes AudioFocusManager to pause playback and abandon audio
// focus.
AudioAttributes media =
@ -288,12 +460,11 @@ public class AudioFocusManagerTest {
.setUsage(C.USAGE_MEDIA)
.setContentType(C.CONTENT_TYPE_SPEECH)
.build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener()).isNull();
@ -307,7 +478,7 @@ public class AudioFocusManagerTest {
@Test
@Config(minSdk = 26, maxSdk = TARGET_SDK)
public void onAudioFocusChange_withAudioFocusLost_sendsDoNotPlayAndAbandondsFocus_v26() {
public void onAudioFocusChange_withFocusLoss_sendsDoNotPlayAndAbandonsFocus_v26() {
// Ensure that AUDIOFOCUS_LOSS causes AudioFocusManager to pause playback and abandon audio
// focus.
AudioAttributes media =
@ -315,12 +486,11 @@ public class AudioFocusManagerTest {
.setUsage(C.USAGE_MEDIA)
.setContentType(C.CONTENT_TYPE_SPEECH)
.build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
audioFocusManager.setAudioAttributes(media);
assertThat(audioFocusManager.updateAudioFocus(/* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest()).isNull();
@ -330,120 +500,6 @@ public class AudioFocusManagerTest {
.isEqualTo(Shadows.shadowOf(audioManager).getLastAudioFocusRequest().audioFocusRequest);
}
@Test
@Config(maxSdk = 25)
public void handleStop_withAudioFocus_abandonsAudioFocus() {
// Ensure that handleStop causes AudioFocusManager to abandon audio focus.
AudioAttributes media =
new AudioAttributes.Builder()
.setUsage(C.USAGE_MEDIA)
.setContentType(C.CONTENT_TYPE_SPEECH)
.build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener()).isNull();
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
audioFocusManager.handleStop();
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener())
.isEqualTo(request.listener);
}
@Test
@Config(minSdk = 26, maxSdk = TARGET_SDK)
public void handleStop_withAudioFocus_abandonsAudioFocus_v26() {
// Ensure that handleStop causes AudioFocusManager to abandon audio focus.
AudioAttributes media =
new AudioAttributes.Builder()
.setUsage(C.USAGE_MEDIA)
.setContentType(C.CONTENT_TYPE_SPEECH)
.build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ true, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_PLAY_WHEN_READY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest()).isNull();
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
audioFocusManager.handleStop();
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest())
.isEqualTo(request.audioFocusRequest);
}
@Test
@Config(maxSdk = 25)
public void handleStop_withoutAudioFocus_stillAbandonsFocus() {
// Ensure that handleStop causes AudioFocusManager to call through to abandon audio focus
// even if focus wasn't requested.
AudioAttributes media =
new AudioAttributes.Builder()
.setUsage(C.USAGE_MEDIA)
.setContentType(C.CONTENT_TYPE_SPEECH)
.build();
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
media, /* playWhenReady= */ false, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener()).isNull();
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(request).isNull();
audioFocusManager.handleStop();
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener()).isNotNull();
}
@Test
@Config(maxSdk = 25)
public void handleStop_withoutHandlingAudioFocus_isNoOp() {
// Ensure that handleStop is a no-op if audio focus isn't handled.
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
/* audioAttributes= */ null, /* playWhenReady= */ false, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener()).isNull();
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(request).isNull();
audioFocusManager.handleStop();
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusListener()).isNull();
}
@Test
@Config(minSdk = 26, maxSdk = TARGET_SDK)
public void handleStop_withoutHandlingAudioFocus_isNoOp_v26() {
// Ensure that handleStop is a no-op if audio focus isn't handled.
Shadows.shadowOf(audioManager)
.setNextFocusRequestResponse(AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
assertThat(
audioFocusManager.setAudioAttributes(
/* audioAttributes= */ null, /* playWhenReady= */ false, Player.STATE_READY))
.isEqualTo(PLAYER_COMMAND_DO_NOT_PLAY);
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest()).isNull();
ShadowAudioManager.AudioFocusRequest request =
Shadows.shadowOf(audioManager).getLastAudioFocusRequest();
assertThat(request).isNull();
audioFocusManager.handleStop();
assertThat(Shadows.shadowOf(audioManager).getLastAbandonedAudioFocusRequest()).isNull();
}
private int getAudioFocusGainFromRequest(ShadowAudioManager.AudioFocusRequest audioFocusRequest) {
return Util.SDK_INT >= 26
? audioFocusRequest.audioFocusRequest.getFocusGain()

View File

@ -0,0 +1,74 @@
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.analytics;
import static com.google.common.truth.Truth.assertThat;
import androidx.annotation.Nullable;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Timeline;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit test for {@link PlaybackStatsListener}. */
@RunWith(AndroidJUnit4.class)
public final class PlaybackStatsListenerTest {
private static final AnalyticsListener.EventTime TEST_EVENT_TIME =
new AnalyticsListener.EventTime(
/* realtimeMs= */ 500,
Timeline.EMPTY,
/* windowIndex= */ 0,
/* mediaPeriodId= */ null,
/* eventPlaybackPositionMs= */ 0,
/* currentPlaybackPositionMs= */ 0,
/* totalBufferedDurationMs= */ 0);
@Test
public void playback_withKeepHistory_updatesStats() {
PlaybackStatsListener playbackStatsListener =
new PlaybackStatsListener(/* keepHistory= */ true, /* callback= */ null);
playbackStatsListener.onPlayerStateChanged(
TEST_EVENT_TIME, /* playWhenReady= */ true, Player.STATE_BUFFERING);
playbackStatsListener.onPlayerStateChanged(
TEST_EVENT_TIME, /* playWhenReady= */ true, Player.STATE_READY);
playbackStatsListener.onPlayerStateChanged(
TEST_EVENT_TIME, /* playWhenReady= */ true, Player.STATE_ENDED);
@Nullable PlaybackStats playbackStats = playbackStatsListener.getPlaybackStats();
assertThat(playbackStats).isNotNull();
assertThat(playbackStats.endedCount).isEqualTo(1);
}
@Test
public void playback_withoutKeepHistory_updatesStats() {
PlaybackStatsListener playbackStatsListener =
new PlaybackStatsListener(/* keepHistory= */ false, /* callback= */ null);
playbackStatsListener.onPlayerStateChanged(
TEST_EVENT_TIME, /* playWhenReady= */ true, Player.STATE_BUFFERING);
playbackStatsListener.onPlayerStateChanged(
TEST_EVENT_TIME, /* playWhenReady= */ true, Player.STATE_READY);
playbackStatsListener.onPlayerStateChanged(
TEST_EVENT_TIME, /* playWhenReady= */ true, Player.STATE_ENDED);
@Nullable PlaybackStats playbackStats = playbackStatsListener.getPlaybackStats();
assertThat(playbackStats).isNotNull();
assertThat(playbackStats.endedCount).isEqualTo(1);
}
}

View File

@ -29,6 +29,11 @@ public final class AdtsExtractorTest {
ExtractorAsserts.assertBehavior(AdtsExtractor::new, "ts/sample.adts");
}
@Test
public void testSample_with_id3() throws Exception {
ExtractorAsserts.assertBehavior(AdtsExtractor::new, "ts/sample_with_id3.adts");
}
@Test
public void testSample_withSeeking() throws Exception {
ExtractorAsserts.assertBehavior(

View File

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer2.extractor.wav;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.testutil.ExtractorAsserts;
import org.junit.Test;
@ -30,7 +31,7 @@ public final class WavExtractorTest {
}
@Test
public void testSampleImaAdpcm() throws Exception {
public void sample_imaAdpcm() throws Exception {
ExtractorAsserts.assertBehavior(WavExtractor::new, "wav/sample_ima_adpcm.wav");
}
}

View File

@ -372,8 +372,10 @@ public final class SampleQueueTest {
assertReadFormat(/* formatRequired= */ false, FORMAT_ENCRYPTED);
assertReadNothing(/* formatRequired= */ false);
assertThat(inputBuffer.waitingForKeys).isTrue();
when(mockDrmSession.getState()).thenReturn(DrmSession.STATE_OPENED_WITH_KEYS);
assertReadEncryptedSample(/* sampleIndex= */ 0);
assertThat(inputBuffer.waitingForKeys).isFalse();
}
@Test

View File

@ -39,6 +39,7 @@ public final class SubripDecoderTest {
private static final String TYPICAL_NEGATIVE_TIMESTAMPS = "subrip/typical_negative_timestamps";
private static final String TYPICAL_UNEXPECTED_END = "subrip/typical_unexpected_end";
private static final String TYPICAL_WITH_TAGS = "subrip/typical_with_tags";
private static final String TYPICAL_NO_HOURS_AND_MILLIS = "subrip/typical_no_hours_and_millis";
@Test
public void testDecodeEmpty() throws IOException {
@ -151,9 +152,14 @@ public final class SubripDecoderTest {
TestUtil.getByteArray(ApplicationProvider.getApplicationContext(), TYPICAL_WITH_TAGS);
Subtitle subtitle = decoder.decode(bytes, bytes.length, false);
assertTypicalCue1(subtitle, 0);
assertTypicalCue2(subtitle, 2);
assertTypicalCue3(subtitle, 4);
assertThat(subtitle.getCues(subtitle.getEventTime(0)).get(0).text.toString())
.isEqualTo("This is the first subtitle.");
assertThat(subtitle.getCues(subtitle.getEventTime(2)).get(0).text.toString())
.isEqualTo("This is the second subtitle.\nSecond subtitle with second line.");
assertThat(subtitle.getCues(subtitle.getEventTime(4)).get(0).text.toString())
.isEqualTo("This is the third subtitle.");
assertThat(subtitle.getCues(subtitle.getEventTime(6)).get(0).text.toString())
.isEqualTo("This { \\an2} is not a valid tag due to the space after the opening bracket.");
@ -172,6 +178,21 @@ public final class SubripDecoderTest {
assertAlignmentCue(subtitle, 26, Cue.ANCHOR_TYPE_START, Cue.ANCHOR_TYPE_END); // {/an9}
}
@Test
public void decodeTypicalNoHoursAndMillis() throws IOException {
SubripDecoder decoder = new SubripDecoder();
byte[] bytes =
TestUtil.getByteArray(
ApplicationProvider.getApplicationContext(), TYPICAL_NO_HOURS_AND_MILLIS);
Subtitle subtitle = decoder.decode(bytes, bytes.length, false);
assertThat(subtitle.getEventTimeCount()).isEqualTo(6);
assertTypicalCue1(subtitle, 0);
assertThat(subtitle.getEventTime(2)).isEqualTo(2_000_000);
assertThat(subtitle.getEventTime(3)).isEqualTo(3_000_000);
assertTypicalCue3(subtitle, 4);
}
private static void assertTypicalCue1(Subtitle subtitle, int eventIndex) {
assertThat(subtitle.getEventTime(eventIndex)).isEqualTo(0);
assertThat(subtitle.getCues(subtitle.getEventTime(eventIndex)).get(0).text.toString())
@ -187,10 +208,12 @@ public final class SubripDecoderTest {
}
private static void assertTypicalCue3(Subtitle subtitle, int eventIndex) {
assertThat(subtitle.getEventTime(eventIndex)).isEqualTo(4567000);
long expectedStartTimeUs = (((2L * 60L * 60L) + 4L) * 1000L + 567L) * 1000L;
assertThat(subtitle.getEventTime(eventIndex)).isEqualTo(expectedStartTimeUs);
assertThat(subtitle.getCues(subtitle.getEventTime(eventIndex)).get(0).text.toString())
.isEqualTo("This is the third subtitle.");
assertThat(subtitle.getEventTime(eventIndex + 1)).isEqualTo(8901000);
long expectedEndTimeUs = (((2L * 60L * 60L) + 8L) * 1000L + 901L) * 1000L;
assertThat(subtitle.getEventTime(eventIndex + 1)).isEqualTo(expectedEndTimeUs);
}
private static void assertAlignmentCue(

View File

@ -73,6 +73,10 @@ public final class MimeTypesTest {
assertThat(MimeTypes.getMediaMimeType("mp4a.AA")).isEqualTo(MimeTypes.AUDIO_DTS_HD);
assertThat(MimeTypes.getMediaMimeType("mp4a.AB")).isEqualTo(MimeTypes.AUDIO_DTS_HD);
assertThat(MimeTypes.getMediaMimeType("mp4a.AD")).isEqualTo(MimeTypes.AUDIO_OPUS);
assertThat(MimeTypes.getMediaMimeType("wvtt")).isEqualTo(MimeTypes.TEXT_VTT);
assertThat(MimeTypes.getMediaMimeType("stpp.")).isEqualTo(MimeTypes.APPLICATION_TTML);
assertThat(MimeTypes.getMediaMimeType("stpp.ttml.im1t")).isEqualTo(MimeTypes.APPLICATION_TTML);
}
@Test

View File

@ -807,15 +807,18 @@ public final class DashMediaSource extends BaseMediaSource {
manifestLoadPending &= manifest.dynamic;
manifestLoadStartTimestampMs = elapsedRealtimeMs - loadDurationMs;
manifestLoadEndTimestampMs = elapsedRealtimeMs;
if (manifest.location != null) {
synchronized (manifestUriLock) {
// This condition checks that replaceManifestUri wasn't called between the start and end of
// this load. If it was, we ignore the manifest location and prefer the manual replacement.
// Checks whether replaceManifestUri(Uri) was called to manually replace the URI between the
// start and end of this load. If it was then isSameUriInstance evaluates to false, and we
// prefer the manual replacement to one derived from the previous request.
@SuppressWarnings("ReferenceEquality")
boolean isSameUriInstance = loadable.dataSpec.uri == manifestUri;
if (isSameUriInstance) {
manifestUri = manifest.location;
}
// Replace the manifest URI with one specified by a manifest Location element (if present),
// or with the final (possibly redirected) URI. This follows the recommendation in
// DASH-IF-IOP 4.3, section 3.2.15.3. See: https://dashif.org/docs/DASH-IF-IOP-v4.3.pdf.
manifestUri = manifest.location != null ? manifest.location : loadable.getUri();
}
}

View File

@ -222,10 +222,11 @@ public class DashManifestParser extends DefaultHandler
protected Pair<Period, Long> parsePeriod(XmlPullParser xpp, String baseUrl, long defaultStartMs)
throws XmlPullParserException, IOException {
String id = xpp.getAttributeValue(null, "id");
@Nullable String id = xpp.getAttributeValue(null, "id");
long startMs = parseDuration(xpp, "start", defaultStartMs);
long durationMs = parseDuration(xpp, "duration", C.TIME_UNSET);
SegmentBase segmentBase = null;
@Nullable SegmentBase segmentBase = null;
@Nullable Descriptor assetIdentifier = null;
List<AdaptationSet> adaptationSets = new ArrayList<>();
List<EventStream> eventStreams = new ArrayList<>();
boolean seenFirstBaseUrl = false;
@ -246,17 +247,24 @@ public class DashManifestParser extends DefaultHandler
segmentBase = parseSegmentList(xpp, null, durationMs);
} else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) {
segmentBase = parseSegmentTemplate(xpp, null, Collections.emptyList(), durationMs);
} else if (XmlPullParserUtil.isStartTag(xpp, "AssetIdentifier")) {
assetIdentifier = parseDescriptor(xpp, "AssetIdentifier");
} else {
maybeSkipTag(xpp);
}
} while (!XmlPullParserUtil.isEndTag(xpp, "Period"));
return Pair.create(buildPeriod(id, startMs, adaptationSets, eventStreams), durationMs);
return Pair.create(
buildPeriod(id, startMs, adaptationSets, eventStreams, assetIdentifier), durationMs);
}
protected Period buildPeriod(String id, long startMs, List<AdaptationSet> adaptationSets,
List<EventStream> eventStreams) {
return new Period(id, startMs, adaptationSets, eventStreams);
protected Period buildPeriod(
@Nullable String id,
long startMs,
List<AdaptationSet> adaptationSets,
List<EventStream> eventStreams,
@Nullable Descriptor assetIdentifier) {
return new Period(id, startMs, adaptationSets, eventStreams, assetIdentifier);
}
// AdaptationSet parsing.

View File

@ -45,13 +45,16 @@ public class Period {
*/
public final List<EventStream> eventStreams;
/** The asset identifier for this period, if one exists */
@Nullable public final Descriptor assetIdentifier;
/**
* @param id The period identifier. May be null.
* @param startMs The start time of the period in milliseconds.
* @param adaptationSets The adaptation sets belonging to the period.
*/
public Period(@Nullable String id, long startMs, List<AdaptationSet> adaptationSets) {
this(id, startMs, adaptationSets, Collections.emptyList());
this(id, startMs, adaptationSets, Collections.emptyList(), /* assetIdentifier= */ null);
}
/**
@ -62,10 +65,27 @@ public class Period {
*/
public Period(@Nullable String id, long startMs, List<AdaptationSet> adaptationSets,
List<EventStream> eventStreams) {
this(id, startMs, adaptationSets, eventStreams, /* assetIdentifier= */ null);
}
/**
* @param id The period identifier. May be null.
* @param startMs The start time of the period in milliseconds.
* @param adaptationSets The adaptation sets belonging to the period.
* @param eventStreams The {@link EventStream}s belonging to the period.
* @param assetIdentifier The asset identifier for this period
*/
public Period(
@Nullable String id,
long startMs,
List<AdaptationSet> adaptationSets,
List<EventStream> eventStreams,
@Nullable Descriptor assetIdentifier) {
this.id = id;
this.startMs = startMs;
this.adaptationSets = Collections.unmodifiableList(adaptationSets);
this.eventStreams = Collections.unmodifiableList(eventStreams);
this.assetIdentifier = assetIdentifier;
}
/**

View File

@ -49,7 +49,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
public final class WebvttExtractor implements Extractor {
private static final Pattern LOCAL_TIMESTAMP = Pattern.compile("LOCAL:([^,]+)");
private static final Pattern MEDIA_TIMESTAMP = Pattern.compile("MPEGTS:(\\d+)");
private static final Pattern MEDIA_TIMESTAMP = Pattern.compile("MPEGTS:(-?\\d+)");
private static final int HEADER_MIN_LENGTH = 6 /* "WEBVTT" */;
private static final int HEADER_MAX_LENGTH = 3 /* optional Byte Order Mark */ + HEADER_MIN_LENGTH;

View File

@ -480,13 +480,23 @@ public final class HlsPlaylistParser implements ParsingLoadable.Parser<HlsPlayli
}
break;
case TYPE_SUBTITLES:
codecs = null;
sampleMimeType = null;
variant = getVariantWithSubtitleGroup(variants, groupId);
if (variant != null) {
codecs = Util.getCodecsOfType(variant.format.codecs, C.TRACK_TYPE_TEXT);
sampleMimeType = MimeTypes.getMediaMimeType(codecs);
}
if (sampleMimeType == null) {
sampleMimeType = MimeTypes.TEXT_VTT;
}
format =
Format.createTextContainerFormat(
/* id= */ formatId,
/* label= */ name,
/* containerMimeType= */ MimeTypes.APPLICATION_M3U8,
/* sampleMimeType= */ MimeTypes.TEXT_VTT,
/* codecs= */ null,
sampleMimeType,
codecs,
/* bitrate= */ Format.NO_VALUE,
selectionFlags,
roleFlags,
@ -569,6 +579,17 @@ public final class HlsPlaylistParser implements ParsingLoadable.Parser<HlsPlayli
return null;
}
@Nullable
private static Variant getVariantWithSubtitleGroup(ArrayList<Variant> variants, String groupId) {
for (int i = 0; i < variants.size(); i++) {
Variant variant = variants.get(i);
if (groupId.equals(variant.subtitleGroupId)) {
return variant;
}
}
return null;
}
private static HlsMediaPlaylist parseMediaPlaylist(
HlsMasterPlaylist masterPlaylist, LineIterator iterator, String baseUri) throws IOException {
@HlsMediaPlaylist.PlaylistType int playlistType = HlsMediaPlaylist.PLAYLIST_TYPE_UNKNOWN;

View File

@ -194,6 +194,19 @@ public class HlsMasterPlaylistParserTest {
+ "#EXT-X-MEDIA:TYPE=SUBTITLES,"
+ "GROUP-ID=\"sub1\",NAME=\"English\",URI=\"s1/en/prog_index.m3u8\"\n";
private static final String PLAYLIST_WITH_TTML_SUBTITLE =
" #EXTM3U\n"
+ "\n"
+ "#EXT-X-VERSION:6\n"
+ "\n"
+ "#EXT-X-INDEPENDENT-SEGMENTS\n"
+ "\n"
+ "#EXT-X-STREAM-INF:BANDWIDTH=1280000,CODECS=\"stpp.ttml.im1t,mp4a.40.2,avc1.66.30\",RESOLUTION=304x128,AUDIO=\"aud1\",SUBTITLES=\"sub1\"\n"
+ "http://example.com/low.m3u8\n"
+ "\n"
+ "#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID=\"aud1\",NAME=\"English\",URI=\"a1/index.m3u8\"\n"
+ "#EXT-X-MEDIA:TYPE=SUBTITLES,GROUP-ID=\"sub1\",NAME=\"English\",AUTOSELECT=YES,DEFAULT=YES,URI=\"s1/en/prog_index.m3u8\"\n";
@Test
public void testParseMasterPlaylist() throws IOException {
HlsMasterPlaylist masterPlaylist = parseMasterPlaylist(PLAYLIST_URI, PLAYLIST_SIMPLE);
@ -380,6 +393,18 @@ public class HlsMasterPlaylistParserTest {
.isEqualTo(createExtXMediaMetadata(/* groupId= */ "aud3", /* name= */ "English"));
}
@Test
public void parseMasterPlaylist_withTtmlSubtitle() throws IOException {
HlsMasterPlaylist playlistWithTtmlSubtitle =
parseMasterPlaylist(PLAYLIST_URI, PLAYLIST_WITH_TTML_SUBTITLE);
HlsMasterPlaylist.Variant variant = playlistWithTtmlSubtitle.variants.get(0);
Format firstTextFormat = playlistWithTtmlSubtitle.subtitles.get(0).format;
assertThat(firstTextFormat.id).isEqualTo("sub1:English");
assertThat(firstTextFormat.containerMimeType).isEqualTo(MimeTypes.APPLICATION_M3U8);
assertThat(firstTextFormat.sampleMimeType).isEqualTo(MimeTypes.APPLICATION_TTML);
assertThat(variant.format.codecs).isEqualTo("stpp.ttml.im1t,mp4a.40.2,avc1.66.30");
}
private static Metadata createExtXStreamInfMetadata(HlsTrackMetadataEntry.VariantInfo... infos) {
return new Metadata(
new HlsTrackMetadataEntry(/* groupId= */ null, /* name= */ null, Arrays.asList(infos)));

View File

@ -140,7 +140,7 @@ public class PlayerNotificationManager {
*
* @param player The {@link Player} for which a notification is being built.
*/
String getCurrentContentTitle(Player player);
CharSequence getCurrentContentTitle(Player player);
/**
* Creates a content intent for the current media item.
@ -160,7 +160,7 @@ public class PlayerNotificationManager {
* @param player The {@link Player} for which a notification is being built.
*/
@Nullable
String getCurrentContentText(Player player);
CharSequence getCurrentContentText(Player player);
/**
* Gets the content sub text for the current media item.
@ -170,7 +170,7 @@ public class PlayerNotificationManager {
* @param player The {@link Player} for which a notification is being built.
*/
@Nullable
default String getCurrentSubText(Player player) {
default CharSequence getCurrentSubText(Player player) {
return null;
}

View File

@ -143,6 +143,12 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
* <li>Corresponding method: None
* <li>Default: {@code surface_view}
* </ul>
* <li><b>{@code use_sensor_rotation}</b> - Whether to use the orientation sensor for rotation
* during spherical playbacks (if available).
* <ul>
* <li>Corresponding method: {@link #setUseSensorRotation(boolean)}
* <li>Default: {@code true}
* </ul>
* <li><b>{@code shutter_background_color}</b> - The background color of the {@code exo_shutter}
* view.
* <ul>
@ -308,6 +314,7 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider
@Nullable private Drawable defaultArtwork;
private @ShowBuffering int showBuffering;
private boolean keepContentOnPlayerReset;
private boolean useSensorRotation;
@Nullable private ErrorMessageProvider<? super ExoPlaybackException> errorMessageProvider;
@Nullable private CharSequence customErrorMessage;
private int controllerShowTimeoutMs;
@ -367,6 +374,7 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider
boolean controllerAutoShow = true;
boolean controllerHideDuringAds = true;
int showBuffering = SHOW_BUFFERING_NEVER;
useSensorRotation = true;
if (attrs != null) {
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.PlayerView, 0, 0);
try {
@ -390,6 +398,8 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider
R.styleable.PlayerView_keep_content_on_player_reset, keepContentOnPlayerReset);
controllerHideDuringAds =
a.getBoolean(R.styleable.PlayerView_hide_during_ads, controllerHideDuringAds);
useSensorRotation =
a.getBoolean(R.styleable.PlayerView_use_sensor_rotation, useSensorRotation);
} finally {
a.recycle();
}
@ -422,6 +432,7 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider
case SURFACE_TYPE_SPHERICAL_GL_SURFACE_VIEW:
SphericalGLSurfaceView sphericalGLSurfaceView = new SphericalGLSurfaceView(context);
sphericalGLSurfaceView.setSingleTapListener(componentListener);
sphericalGLSurfaceView.setUseSensorRotation(useSensorRotation);
surfaceView = sphericalGLSurfaceView;
break;
case SURFACE_TYPE_VIDEO_DECODER_GL_SURFACE_VIEW:
@ -746,6 +757,22 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider
}
}
/**
* Sets whether to use the orientation sensor for rotation during spherical playbacks (if
* available)
*
* @param useSensorRotation Whether to use the orientation sensor for rotation during spherical
* playbacks.
*/
public void setUseSensorRotation(boolean useSensorRotation) {
if (this.useSensorRotation != useSensorRotation) {
this.useSensorRotation = useSensorRotation;
if (surfaceView instanceof SphericalGLSurfaceView) {
((SphericalGLSurfaceView) surfaceView).setUseSensorRotation(useSensorRotation);
}
}
}
/**
* Sets whether a buffering spinner is displayed when the player is in the buffering state. The
* buffering spinner is not displayed by default.

View File

@ -72,6 +72,9 @@ public final class SphericalGLSurfaceView extends GLSurfaceView {
@Nullable private SurfaceTexture surfaceTexture;
@Nullable private Surface surface;
@Nullable private Player.VideoComponent videoComponent;
private boolean useSensorRotation;
private boolean isStarted;
private boolean isOrientationListenerRegistered;
public SphericalGLSurfaceView(Context context) {
this(context, null);
@ -104,6 +107,7 @@ public final class SphericalGLSurfaceView extends GLSurfaceView {
WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
Display display = Assertions.checkNotNull(windowManager).getDefaultDisplay();
orientationListener = new OrientationListener(display, touchTracker, renderer);
useSensorRotation = true;
setEGLContextClientVersion(2);
setRenderer(renderer);
@ -145,20 +149,23 @@ public final class SphericalGLSurfaceView extends GLSurfaceView {
touchTracker.setSingleTapListener(listener);
}
/** Sets whether to use the orientation sensor for rotation (if available). */
public void setUseSensorRotation(boolean useSensorRotation) {
this.useSensorRotation = useSensorRotation;
updateOrientationListenerRegistration();
}
@Override
public void onResume() {
super.onResume();
if (orientationSensor != null) {
sensorManager.registerListener(
orientationListener, orientationSensor, SensorManager.SENSOR_DELAY_FASTEST);
}
isStarted = true;
updateOrientationListenerRegistration();
}
@Override
public void onPause() {
if (orientationSensor != null) {
sensorManager.unregisterListener(orientationListener);
}
isStarted = false;
updateOrientationListenerRegistration();
super.onPause();
}
@ -181,6 +188,20 @@ public final class SphericalGLSurfaceView extends GLSurfaceView {
});
}
private void updateOrientationListenerRegistration() {
boolean enabled = useSensorRotation && isStarted;
if (orientationSensor == null || enabled == isOrientationListenerRegistered) {
return;
}
if (enabled) {
sensorManager.registerListener(
orientationListener, orientationSensor, SensorManager.SENSOR_DELAY_FASTEST);
} else {
sensorManager.unregisterListener(orientationListener);
}
isOrientationListenerRegistered = enabled;
}
// Called on GL thread.
private void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture) {
mainHandler.post(

View File

@ -77,8 +77,8 @@
<enum name="always" value="2"/>
</attr>
<attr name="keep_content_on_player_reset" format="boolean"/>
<attr name="use_sensor_rotation" format="boolean"/>
<attr name="player_layout_id" format="reference"/>
<attr name="surface_type"/>
<!-- AspectRatioFrameLayout attributes -->
<attr name="resize_mode"/>