mirror of https://github.com/androidx/media.git
synced 2025-04-29 22:36:54 +08:00

Compare commits: 224 commits (4045acd13b ... 6034a3c3d6)

RELEASENOTES.md

@@ -3,247 +3,125 @@

### Unreleased changes

* Common Library:
  * Change `SimpleBasePlayer.State` access from protected to public to make
    it easier to handle updates in other classes
    ([#2128](https://github.com/androidx/media/issues/2128)).
* ExoPlayer:
* Transformer:
* Track Selection:
* Extractors:
  * MP3: Use duration and data size from unseekable Xing, VBRI and similar
    variable bitrate metadata when falling back to constant bitrate seeking
    due to `FLAG_ENABLE_CONSTANT_BITRATE_SEEKING(_ALWAYS)`
    ([#2194](https://github.com/androidx/media/issues/2194)).
* DataSource:
* Audio:
  * Allow constant power upmixing/downmixing in `DefaultAudioMixer`.
* Video:
  * Add experimental `ExoPlayer` API to include the
    `MediaCodec.BUFFER_FLAG_DECODE_ONLY` flag when queuing decode-only input
    buffers. This flag signals the decoder to skip the decode-only buffers,
    resulting in faster seeking. Enable it with
    `DefaultRenderersFactory.experimentalSetEnableMediaCodecBufferDecodeOnlyFlag`
    (see the sketch after this section).
  * Improve codec performance checks for software video codecs. This may
    lead to some additional tracks being marked as `EXCEEDS_CAPABILITIES`.
* Text:
* Metadata:
* Image:
* DataSource:
* DRM:
* Effect:
  * Add `Presentation.createForShortSide(int)` that creates a `Presentation`
    ensuring the shortest side always matches the given value, regardless of
    input orientation.
* Muxers:
  * The `writeSampleData()` API now uses a muxer-specific `BufferInfo` class
    instead of `MediaCodec.BufferInfo`.
* IMA extension:
* Session:
  * Make `MediaSession.setSessionActivity(PendingIntent)` accept null
    ([#2109](https://github.com/androidx/media/issues/2109)).
* UI:
* Downloads:
* OkHttp Extension:
* Cronet Extension:
* RTMP Extension:
* HLS Extension:
* DASH Extension:
* Smooth Streaming Extension:
* RTSP Extension:
* Decoder Extensions (FFmpeg, VP9, AV1, etc.):
  * Add partial download support for progressive streams. Apps can prepare a
    progressive stream with `DownloadHelper`, and request a
    `DownloadRequest` from the helper, specifying the time-based media start
    and end positions that the download should cover. The returned
    `DownloadRequest` carries the resolved byte range, with which a
    `ProgressiveDownloader` can be created to download the content
    accordingly (see the sketch after this section).
  * Add `DownloadHelper.Factory`, which replaces the static
    `DownloadHelper.forMediaItem()` methods.
* OkHttp extension:
* Cronet extension:
* RTMP extension:
* HLS extension:
* DASH extension:
* Smooth Streaming extension:
* RTSP extension:
* Decoder extensions (FFmpeg, VP9, AV1, etc.):
* MIDI extension:
* Leanback extension:
* Cast Extension:
* Cast extension:
  * Add support for playlist metadata
    ([#2235](https://github.com/androidx/media/pull/2235)).
* Test Utilities:
* Demo app:
  * Add `PlaybackSpeedPopUpButton` Composable UI element to be part of
    `ExtraControls` in `demo-compose`.
* Remove deprecated symbols:
  * Removed deprecated `SegmentDownloader` constructor
    `SegmentDownloader(MediaItem, Parser<M>, CacheDataSource.Factory,
    Executor)` and the corresponding constructors in its subclasses
    `DashDownloader`, `HlsDownloader` and `SsDownloader`.
  * Removed deprecated `Player.hasNext()` and `Player.hasNextWindow()`. Use
    `Player.hasNextMediaItem()` instead.
  * Removed deprecated `Player.next()`. Use `Player.seekToNextMediaItem()`
    instead.
  * Removed deprecated `Player.seekToPreviousWindow()`. Use
    `Player.seekToPreviousMediaItem()` instead.
  * Removed deprecated `Player.seekToNextWindow()`. Use
    `Player.seekToNextMediaItem()` instead.
  * Removed deprecated `BaseAudioProcessor` in the `exoplayer` module. Use
    `BaseAudioProcessor` from the `common` module instead.

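A minimal Kotlin sketch of opting in to the decode-only buffer flag described
above. The setter name is taken from the note; as an experimental API it may
change, so treat this as illustrative rather than definitive:

```kotlin
import android.content.Context
import androidx.media3.exoplayer.DefaultRenderersFactory
import androidx.media3.exoplayer.ExoPlayer

// Sketch: enable the experimental decode-only buffer flag on the renderers
// factory before building the player.
fun buildPlayer(context: Context): ExoPlayer {
  val renderersFactory =
      DefaultRenderersFactory(context).apply {
        experimentalSetEnableMediaCodecBufferDecodeOnlyFlag(true)
      }
  return ExoPlayer.Builder(context, renderersFactory).build()
}
```
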
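And a sketch of the partial-download flow described above. The
`DownloadHelper.Factory().create(...)` entry point and the positional
`getDownloadRequest` overload are assumptions inferred from the note, not
verified signatures; check the current `DownloadHelper` API before relying on
them:

```kotlin
import java.io.IOException
import androidx.media3.common.MediaItem
import androidx.media3.exoplayer.offline.DownloadHelper
import androidx.media3.exoplayer.offline.DownloadRequest

// Hypothetical method names below illustrate the described flow only.
fun requestPartialDownload(mediaItem: MediaItem, onReady: (DownloadRequest) -> Unit) {
  val helper = DownloadHelper.Factory().create(mediaItem) // assumed entry point
  helper.prepare(
      object : DownloadHelper.Callback {
        override fun onPrepared(helper: DownloadHelper, tracksInfoAvailable: Boolean) {
          // Assumed overload taking time-based start/end positions; the returned
          // request carries the resolved byte range for a ProgressiveDownloader.
          onReady(helper.getDownloadRequest(/* startPositionMs= */ 0L, /* durationMs= */ 30_000L))
        }

        override fun onPrepareError(helper: DownloadHelper, e: IOException) {
          // Handle the error.
        }
      })
}
```
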
## 1.6

### 1.6.0-beta01 (2025-02-26)
### 1.6.0 (2025-03-26)

This release includes the following changes since the
[1.6.0-alpha03 release](#160-alpha03-2025-02-06):
[1.5.1 release](#151-2024-12-19):

* Common Library:
  * Add `AudioManagerCompat` and `AudioFocusRequestCompat` to replace the
    equivalent classes in `androidx.media`.
  * Upgrade Kotlin from 1.9.20 to 2.0.20 and use the Compose Compiler Gradle
    plugin. Upgrade the KotlinX Coroutines library from 1.8.1 to 1.9.0.
* ExoPlayer:
  * The initial audio session ID is no longer immediately available after
    creating the player. You can use
    `AnalyticsListener.onAudioSessionIdChanged` to listen to the initial
    update if required (see the sketch after this section).
* Transformer:
  * Add `MediaProjectionAssetLoader`, which provides media from a
    `MediaProjection` for screen recording, and add support for screen
    recording to the Transformer demo app.
  * Add `#getInputFormat()` to the `Codec` interface.
  * Shift the responsibility to release the `GlObjectsProvider` onto the
    caller in `DefaultVideoFrameProcessor` and `DefaultVideoCompositor` when
    possible.
* Video:
  * Add experimental `ExoPlayer` API to drop late `MediaCodecVideoRenderer`
    decoder input buffers that are not depended on. Enable it with
    `DefaultRenderersFactory.experimentalSetLateThresholdToDropDecoderInputUs`.
* Session:
  * Keep foreground service state for an additional 10 minutes when playback
    pauses, stops or fails. This allows users to resume playback within this
    timeout without risking foreground service restrictions on various
    devices. Note that simply calling `player.pause()` can no longer be used
    to stop the foreground service before `stopSelf()` when overriding
    `onTaskRemoved`; use `MediaSessionService.pauseAllPlayersAndStopSelf()`
    instead (see the sketch after this section).
  * Keep the notification visible when playback enters an error or stopped
    state. The notification is only removed if the playlist is cleared or
    the player is released.
  * Improve handling of the Android platform MediaSession actions ACTION_PLAY
    and ACTION_PAUSE to only set one of them according to the available
    commands, and also accept if only one of them is set.
* Remove deprecated symbols:
  * Removed the following deprecated `DownloadHelper` methods:
    * Constructor `DownloadHelper(MediaItem, @Nullable MediaSource,
      TrackSelectionParameters, RendererCapabilities[])`, use
      `DownloadHelper(MediaItem, @Nullable MediaSource,
      TrackSelectionParameters, RendererCapabilitiesList)` instead.
    * `getRendererCapabilities(RenderersFactory)`; equivalent
      functionality can be achieved by creating a
      `DefaultRendererCapabilitiesList` with a `RenderersFactory` and
      calling `DefaultRendererCapabilitiesList.getRendererCapabilities()`.
  * Removed the
    `PlayerNotificationManager.setMediaSessionToken(MediaSessionCompat)`
    method. Use
    `PlayerNotificationManager.setMediaSessionToken(MediaSession.Token)` and
    pass in `(MediaSession.Token) compatToken.getToken()` instead.

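A minimal Kotlin sketch of observing the initial audio session ID, per the
ExoPlayer note above (listener registration is standard `ExoPlayer` usage):

```kotlin
import androidx.media3.exoplayer.ExoPlayer
import androidx.media3.exoplayer.analytics.AnalyticsListener

// The initial audio session ID now arrives asynchronously; observe it via
// AnalyticsListener instead of reading it right after build().
fun observeAudioSessionId(player: ExoPlayer, onSessionId: (Int) -> Unit) {
  player.addAnalyticsListener(
      object : AnalyticsListener {
        override fun onAudioSessionIdChanged(
            eventTime: AnalyticsListener.EventTime,
            audioSessionId: Int,
        ) {
          onSessionId(audioSessionId) // First call delivers the initial ID.
        }
      })
}
```
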
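And a sketch of the updated `onTaskRemoved` pattern from the Session notes
above, assuming a typical `MediaSessionService` subclass (the `mediaSession`
field is illustrative and would be created in `onCreate()`):

```kotlin
import android.content.Intent
import androidx.media3.session.MediaSession
import androidx.media3.session.MediaSessionService

class PlaybackService : MediaSessionService() {
  private var mediaSession: MediaSession? = null // Created in onCreate() (not shown).

  override fun onTaskRemoved(rootIntent: Intent?) {
    // player.pause() alone no longer stops the foreground service within the
    // 10-minute grace period; use the dedicated helper instead.
    pauseAllPlayersAndStopSelf()
  }

  override fun onGetSession(controllerInfo: MediaSession.ControllerInfo): MediaSession? =
      mediaSession
}
```
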
### 1.6.0-alpha03 (2025-02-06)

This release includes the following changes since the
[1.6.0-alpha02 release](#160-alpha02-2025-01-30):

* ExoPlayer:
  * Add option to `ClippingMediaSource` to allow clipping in unseekable
    media.
  * Fix bug where seeking with pre-warming could block the following media
    item transition.
* Audio:
  * Make `androidx.media3.common.audio.SonicAudioProcessor` final.
* Video:
  * Change `MediaCodecVideoRenderer.shouldUsePlaceholderSurface` to
    protected so that applications can override it to block usage of
    placeholder surfaces
    ([#1905](https://github.com/androidx/media/pull/1905)).
  * Add experimental `ExoPlayer` AV1 sample dependency parsing to speed up
    seeking. Enable it with the new
    `DefaultRenderersFactory.experimentalSetParseAv1SampleDependencies` API.
* Muxers:
  * Disable `Mp4Muxer` sample batching and copying by default.
* Remove deprecated symbols:
  * Removed `androidx.media3.exoplayer.audio.SonicAudioProcessor`.

### 1.6.0-alpha02 (2025-01-30)

This release includes the following changes since the
[1.6.0-alpha01 release](#160-alpha01-2024-12-20):

* Common Library:
  * Remove `Format.toBundle(boolean excludeMetadata)` method, use
    `Format.toBundle()` instead.
  * Fix bug in `SimpleBasePlayer` where setting a new
    `currentMediaItemIndex` in `State` after `setPlaylist` with `null`
    `MediaMetadata` does not reevaluate the metadata
    ([#1940](https://github.com/androidx/media/issues/1940)).
  * Change `SimpleBasePlayer.State` access from protected to public to make
    it easier to handle updates in other classes
    ([#2128](https://github.com/androidx/media/issues/2128)).
* ExoPlayer:
  * Add `MediaExtractorCompat`, a new class that provides equivalent
    features to the platform `MediaExtractor`.
  * Add experimental `ExoPlayer` pre-warming support for playback using
    `MediaCodecVideoRenderer`. You can configure `DefaultRenderersFactory`
    through `experimentalSetEnableMediaCodecVideoRendererPrewarming` to
    provide a secondary `MediaCodecVideoRenderer` to `ExoPlayer`. If
    enabled, `ExoPlayer` pre-processes the video of consecutive media items
    during playback to reduce media item transition latency.
  * Fix issue where additional decode-only frames may be displayed in quick
    succession when transitioning to content media after a mid-roll ad.
  * Make `DefaultRenderersFactory` add two `MetadataRenderer` instances to
    enable apps to receive two different schemes of metadata by default.
  * Reduce default values for `bufferForPlaybackMs` and
    `bufferForPlaybackAfterRebufferMs` in `DefaultLoadControl` to 1000 and
    2000 ms respectively (see the sketch after this section).
  * Initialize `DeviceInfo` and device volume asynchronously (if enabled
    using `setDeviceVolumeControlEnabled`). These values aren't available
    instantly after `ExoPlayer.Builder.build()`, and `Player.Listener`
    notifies changes through `onDeviceInfoChanged` and
    `onDeviceVolumeChanged`.
  * Reevaluate whether the ongoing load of a chunk should be cancelled when
    playback is paused
    ([#1785](https://github.com/androidx/media/pull/1785)).
* Transformer:
  * Enable support for Android platform diagnostics using
    `MediaMetricsManager`. Transformer forwards editing events and
    performance data to the platform, which helps to provide system
    performance and debugging information on the device. This data may also
    be collected by Google
    [if sharing usage and diagnostics data is enabled](https://support.google.com/accounts/answer/6078260)
    by the user of the device. Apps can opt out of contributing to platform
    diagnostics for Transformer with
    `Transformer.Builder.setUsePlatformDiagnostics(false)`.
  * Split `InAppMuxer` into `InAppMp4Muxer` and `InAppFragmentedMp4Muxer`.
    Use `InAppMp4Muxer` to produce a non-fragmented MP4 file and
    `InAppFragmentedMp4Muxer` to produce a fragmented MP4 file.
  * Move the `Muxer` interface from `media3-muxer` to `media3-transformer`.
  * Add support for transcoding and transmuxing Dolby Vision (profile 8)
    format.
* Extractors:
  * Fix handling of NAL units with lengths expressed in 1 or 2 bytes (rather
    than 4).
  * Fix `ArrayIndexOutOfBoundsException` in MP4 edit lists when the edit
    list starts at a non-sync frame with no preceding sync frame
    ([#2062](https://github.com/androidx/media/issues/2062)).
* Audio:
  * Don't bypass `SonicAudioProcessor` when `SpeedChangingAudioProcessor` is
    configured with default parameters.
  * Fix underflow in `Sonic#getOutputSize()` that could cause
    `DefaultAudioSink` to stall.
  * Fix `MediaCodecAudioRenderer.getDurationToProgressUs()` and
    `DecoderAudioRenderer.getDurationToProgressUs()` so that seeks correctly
    reset the provided durations.
* Text:
  * TTML: Add support for referencing `tts:origin` and `tts:extent` using
    `style` ([#2953](https://github.com/google/ExoPlayer/issues/2953)).
  * Restrict WebVTT and SubRip timestamps to exactly 3 decimal places.
    Previously we incorrectly parsed any number of decimal places but always
    assumed the value was in milliseconds, leading to incorrect timestamps
    ([#1997](https://github.com/androidx/media/issues/1997)).
  * Add support for VobSub subtitles
    ([#8260](https://github.com/google/ExoPlayer/issues/8260)).
  * Fix playback hanging when a playlist contains clipped items with CEA-608
    or CEA-708 captions.
  * Fix `IllegalStateException` when an SSA file contains a cue with zero
    duration (start and end time equal)
    ([#2052](https://github.com/androidx/media/issues/2052)).
  * Suppress (and log) subtitle parsing errors when subtitles are muxed into
    the same container as audio and video
    ([#2052](https://github.com/androidx/media/issues/2052)).
* Muxers:
  * Renamed the `setSampleCopyEnabled()` method to `setSampleCopyingEnabled()`
    in both `Mp4Muxer.Builder` and `FragmentedMp4Muxer.Builder`.
  * `Mp4Muxer.addTrack()` and `FragmentedMp4Muxer.addTrack()` now return an
    `int` track ID instead of a `TrackToken`.
  * `Mp4Muxer` and `FragmentedMp4Muxer` no longer implement the `Muxer`
    interface.
* Session:
  * Fix bug where calling a `Player` method on a `MediaController` connected
    to a legacy session dropped changes from a pending update.
* UI:
  * Add `PresentationState` state holder class and the corresponding
    `rememberPresentationState` Composable to `media3-ui-compose`.
* HLS Extension:
  * Parse `SUPPLEMENTAL-CODECS` tag from HLS playlist to detect Dolby Vision
    formats ([#1785](https://github.com/androidx/media/pull/1785)).
* DASH Extension:
  * Fix issue when calculating the update interval for ad insertion in
    multi-period live streams
    ([#1698](https://github.com/androidx/media/issues/1698)).
  * Parse `scte214:supplementalCodecs` attribute from DASH manifest to
    detect Dolby Vision formats
    ([#1785](https://github.com/androidx/media/pull/1785)).
  * Improve handling of period transitions in live streams where the period
    contains media samples beyond the declared period duration
    ([#1698](https://github.com/androidx/media/issues/1698)).
* Demo app:
  * Use `PresentationState` to control the aspect ratio of the `PlayerSurface`
    Composable. This depends on the `ContentScale` type and covers it with a
    shutter overlay before the first frame is rendered.
* Remove deprecated symbols:
  * Removed `ExoPlayer.VideoComponent`, `ExoPlayer.AudioComponent`,
    `ExoPlayer.TextComponent` and `ExoPlayer.DeviceComponent`.

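As referenced in the `DefaultLoadControl` note above, apps that prefer more
conservative playback-buffer values can set them explicitly. A sketch assuming
the standard builder; the min/max values below are illustrative and should be
checked against the current `DefaultLoadControl` constants:

```kotlin
import android.content.Context
import androidx.media3.exoplayer.DefaultLoadControl
import androidx.media3.exoplayer.ExoPlayer

fun buildPlayerWithCustomBuffering(context: Context): ExoPlayer {
  val loadControl =
      DefaultLoadControl.Builder()
          .setBufferDurationsMs(
              /* minBufferMs= */ 50_000,
              /* maxBufferMs= */ 50_000,
              /* bufferForPlaybackMs= */ 2_500, // larger than the new 1000 ms default
              /* bufferForPlaybackAfterRebufferMs= */ 5_000, // larger than the new 2000 ms default
          )
          .build()
  return ExoPlayer.Builder(context).setLoadControl(loadControl).build()
}
```
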
### 1.6.0-alpha01 (2024-12-20)

This release includes the following changes since the
[1.5.1 release](#151-2024-12-19):

* Common Library:
  * Remove `Format.toBundle(boolean excludeMetadata)` method, use
    `Format.toBundle()` instead.
  * Add `AudioManagerCompat` and `AudioFocusRequestCompat` to replace the
    equivalent classes in `androidx.media`.
* ExoPlayer:
  * The initial audio session ID is no longer immediately available after
    creating the player. You can use
    `AnalyticsListener.onAudioSessionIdChanged` to listen to the initial
    update if required.
  * Consider language when selecting a video track. By default, select a
    'main' video track that matches the language of the selected audio
    track, if available. Explicit video language preferences can be

@@ -257,11 +135,6 @@ This release includes the following changes since the

    with durations that don't match the actual content could cause frame
    freezes at the end of the item
    ([#1698](https://github.com/androidx/media/issues/1698)).
  * Reduce default values for `bufferForPlaybackMs` and
    `bufferForPlaybackAfterRebufferMs` in `DefaultLoadControl` to 1000 and
    2000 ms respectively.
  * Add `MediaExtractorCompat`, a new class that provides equivalent
    features to the platform `MediaExtractor`.
  * Move `BasePreloadManager.Listener` to a top-level
    `PreloadManagerListener`.
  * `RenderersFactory.createSecondaryRenderer` can be implemented to provide

@@ -276,29 +149,121 @@ This release includes the following changes since the

  * Change `AdsMediaSource` to allow the `AdPlaybackStates` to grow by
    appending ad groups. Invalid modifications are detected and throw an
    exception.
  * Fix issue where additional decode-only frames may be displayed in quick
    succession when transitioning to content media after a mid-roll ad.
  * Make `DefaultRenderersFactory` add two `MetadataRenderer` instances to
    enable apps to receive two different schemes of metadata by default.
  * Reevaluate whether the ongoing load of a chunk should be cancelled when
    playback is paused
    ([#1785](https://github.com/androidx/media/pull/1785)).
  * Add option to `ClippingMediaSource` to allow clipping in unseekable
    media.
  * Fix bug where seeking with pre-warming could block the following media
    item transition.
  * Fix a bug where `ExoPlayer.isLoading()` remains `true` after it has
    transitioned to `STATE_IDLE` or `STATE_ENDED`
    ([#2133](https://github.com/androidx/media/issues/2133)).
  * Add `lastRebufferRealtimeMs` to `LoadControl.Parameter`
    ([#2113](https://github.com/androidx/media/pull/2113)).
* Transformer:
  * Update parameters of `VideoFrameProcessor.registerInputStream` and
    `VideoFrameProcessor.Listener.onInputStreamRegistered` to use `Format`.
  * Add support for transmuxing into alternative backward compatible
    formats.
  * Add support for transcoding and transmuxing Dolby Vision (profile 8)
    format.
  * Generate HDR static metadata when using `DefaultEncoderFactory`.
  * Enable support for Android platform diagnostics using
    `MediaMetricsManager`. Transformer forwards editing events and
    performance data to the platform, which helps to provide system
    performance and debugging information on the device. This data may also
    be collected by Google
    [if sharing usage and diagnostics data is enabled](https://support.google.com/accounts/answer/6078260)
    by the user of the device. Apps can opt out of contributing to platform
    diagnostics for Transformer with
    `Transformer.Builder.setUsePlatformDiagnostics(false)`.
  * Split `InAppMuxer` into `InAppMp4Muxer` and `InAppFragmentedMp4Muxer`.
    Use `InAppMp4Muxer` to produce a non-fragmented MP4 file and
    `InAppFragmentedMp4Muxer` to produce a fragmented MP4 file.
  * Move the `Muxer` interface from `media3-muxer` to `media3-transformer`.
  * Add `MediaProjectionAssetLoader`, which provides media from a
    `MediaProjection` for screen recording, and add support for screen
    recording to the Transformer demo app.
  * Add `#getInputFormat()` to the `Codec` interface.
  * Shift the responsibility to release the `GlObjectsProvider` onto the
    caller in `DefaultVideoFrameProcessor` and `DefaultVideoCompositor` when
    possible.
* Extractors:
  * AVI: Fix handling of files with constant bitrate compressed audio where
    the stream header stores the number of bytes instead of the number of
    chunks.
  * Fix handling of NAL units with lengths expressed in 1 or 2 bytes (rather
    than 4).
  * Fix `ArrayIndexOutOfBoundsException` in MP4 edit lists when the edit
    list starts at a non-sync frame with no preceding sync frame
    ([#2062](https://github.com/androidx/media/issues/2062)).
  * Fix issue where TS streams can get stuck on some devices
    ([#2069](https://github.com/androidx/media/issues/2069)).
  * FLAC: Add support for 32-bit FLAC files. Previously these would fail to
    play with `IllegalStateException: Playback stuck buffering and not
    loading` ([#2197](https://github.com/androidx/media/issues/2197)).
* Audio:
  * Fix `onAudioPositionAdvancing` to be called when playback resumes
    (previously it was called when playback was paused).
  * Don't bypass `SonicAudioProcessor` when `SpeedChangingAudioProcessor` is
    configured with default parameters.
  * Fix underflow in `Sonic#getOutputSize()` that could cause
    `DefaultAudioSink` to stall.
  * Fix `MediaCodecAudioRenderer.getDurationToProgressUs()` and
    `DecoderAudioRenderer.getDurationToProgressUs()` so that seeks correctly
    reset the provided durations.
  * Make `androidx.media3.common.audio.SonicAudioProcessor` final.
  * Add support for float PCM to `ChannelMappingAudioProcessor` and
    `TrimmingAudioProcessor`.
* Video:
  * Fix `MediaCodecVideoRenderer` so that, when operating without a
    `Surface`, the renderer skips just-early frames only if the
    `VideoFrameReleaseControl.getFrameReleaseAction` is not
    `FRAME_RELEASE_TRY_AGAIN_LATER`.
  * Change `MediaCodecVideoRenderer.shouldUsePlaceholderSurface` to
    protected so that applications can override it to block usage of
    placeholder surfaces
    ([#1905](https://github.com/androidx/media/pull/1905)).
  * Add experimental `ExoPlayer` AV1 sample dependency parsing to speed up
    seeking. Enable it with the new
    `DefaultRenderersFactory.experimentalSetParseAv1SampleDependencies` API.
  * Add experimental `ExoPlayer` API to drop late `MediaCodecVideoRenderer`
    decoder input buffers that are not depended on. Enable it with
    `DefaultRenderersFactory.experimentalSetLateThresholdToDropDecoderInputUs`.
  * Fix issue where a player without a surface was ready immediately and
    very slow to decode any pending frames
    ([#1973](https://github.com/androidx/media/issues/1973)).
  * Exclude Xiaomi and OPPO devices from detached surface mode to avoid
    screen flickering
    ([#2059](https://github.com/androidx/media/issues/2059)).
* Text:
  * Add support for VobSub subtitles
    ([#8260](https://github.com/google/ExoPlayer/issues/8260)).
  * Stop eagerly loading all subtitle files configured with
    `MediaItem.Builder.setSubtitleConfigurations`, and instead only load one
    if it is selected by track selection
    ([#1721](https://github.com/androidx/media/issues/1721)).
  * TTML: Add support for referencing `tts:origin` and `tts:extent` using
    `style` ([#2953](https://github.com/google/ExoPlayer/issues/2953)).
  * Restrict WebVTT and SubRip timestamps to exactly 3 decimal places.
    Previously we incorrectly parsed any number of decimal places but always
    assumed the value was in milliseconds, leading to incorrect timestamps
    ([#1997](https://github.com/androidx/media/issues/1997)).
  * Fix playback hanging when a playlist contains clipped items with CEA-608
    or CEA-708 captions.
  * Fix `IllegalStateException` when an SSA file contains a cue with zero
    duration (start and end time equal)
    ([#2052](https://github.com/androidx/media/issues/2052)).
  * Suppress (and log) subtitle parsing errors when subtitles are muxed into
    the same container as audio and video
    ([#2052](https://github.com/androidx/media/issues/2052)).
  * Fix handling of multi-byte UTF-8 characters in WebVTT files using CR
    line endings ([#2167](https://github.com/androidx/media/issues/2167)).
* DRM:
  * Fix `MediaCodec$CryptoException: Operation not supported in this
    configuration` error when playing ClearKey content on API < 27 devices
    ([#1732](https://github.com/androidx/media/issues/1732)).
* Effect:
  * Moved the functionality of `OverlaySettings` into
    `StaticOverlaySettings`. `OverlaySettings` can be subclassed to allow

@@ -306,18 +271,49 @@ This release includes the following changes since the

* Muxers:
  * Moved `MuxerException` out of the `Muxer` interface to avoid a very long
    fully qualified name.
  * Renamed the `setSampleCopyEnabled()` method to `setSampleCopyingEnabled()`
    in both `Mp4Muxer.Builder` and `FragmentedMp4Muxer.Builder`.
  * `Mp4Muxer.addTrack()` and `FragmentedMp4Muxer.addTrack()` now return an
    `int` track ID instead of a `TrackToken`.
  * `Mp4Muxer` and `FragmentedMp4Muxer` no longer implement the `Muxer`
    interface.
  * Disable `Mp4Muxer` sample batching and copying by default.
  * Fix a bug in `FragmentedMp4Muxer` that creates a lot of fragments when
    only an audio track is written.
* Session:
  * Keep foreground service state for an additional 10 minutes when playback
    pauses, stops or fails. This allows users to resume playback within this
    timeout without risking foreground service restrictions on various
    devices. Note that simply calling `player.pause()` can no longer be used
    to stop the foreground service before `stopSelf()` when overriding
    `onTaskRemoved`; use `MediaSessionService.pauseAllPlayersAndStopSelf()`
    instead.
  * Keep the notification visible when playback enters an error or stopped
    state. The notification is only removed if the playlist is cleared or
    the player is released.
  * Improve handling of the Android platform MediaSession actions ACTION_PLAY
    and ACTION_PAUSE to only set one of them according to the available
    commands, and also accept if only one of them is set.
  * Add `Context` as a parameter to
    `MediaButtonReceiver.shouldStartForegroundService`
    ([#1887](https://github.com/androidx/media/issues/1887)).
  * Fix bug where calling a `Player` method on a `MediaController` connected
    to a legacy session dropped changes from a pending update.
  * Make `MediaSession.setSessionActivity(PendingIntent)` accept null
    ([#2109](https://github.com/androidx/media/issues/2109)).
  * Fix bug where a stale notification stays visible when the playlist is
    cleared ([#2211](https://github.com/androidx/media/issues/2211)).
* UI:
  * Add `PlayerSurface` Composable to the `media3-ui-compose` module.
  * Add `PlayPauseButtonState`, `NextButtonState`, `PreviousButtonState`,
    `RepeatButtonState`, `ShuffleButtonState` classes and the corresponding
    `rememberPlayPauseButtonState`, `rememberNextButtonState`,
    `rememberPreviousButtonState`, `rememberRepeatButtonState`,
    `rememberShuffleButtonState` Composables to the `media3-ui-compose` module.
* HLS Extension:
  * Add state holders and composables to the `media3-ui-compose` module for
    `PlayerSurface`, `PresentationState`, `PlayPauseButtonState`,
    `NextButtonState`, `PreviousButtonState`, `RepeatButtonState`,
    `ShuffleButtonState` and `PlaybackSpeedState`.
* Downloads:
  * Fix bug in `CacheWriter` that leaves data sources open and cache areas
    locked in case the data source throws an `Exception` other than
    `IOException`
    ([#9760](https://github.com/google/ExoPlayer/issues/9760)).
* HLS extension:
  * Add a first version of `HlsInterstitialsAdsLoader`. The ads loader reads
    the HLS interstitials of an HLS media playlist and maps them to the
    `AdPlaybackState` that is passed to the `AdsMediaSource`. This initial

@@ -325,19 +321,40 @@ This release includes the following changes since the

  * Add `HlsInterstitialsAdsLoader.AdsMediaSourceFactory`. Apps can use it
    to create `AdsMediaSource` instances that use an
    `HlsInterstitialsAdsLoader` in a convenient and safe way.
* DASH Extension:
  * Parse `SUPPLEMENTAL-CODECS` tag from HLS playlist to detect Dolby Vision
    formats ([#1785](https://github.com/androidx/media/pull/1785)).
  * Loosen the condition for seeking to sync positions in an HLS stream
    ([#2209](https://github.com/androidx/media/issues/2209)).
* DASH extension:
  * Add AC-4 Level-4 format support for DASH
    ([#1898](https://github.com/androidx/media/pull/1898)).
* Decoder Extensions (FFmpeg, VP9, AV1, etc.):
  * Fix issue when calculating the update interval for ad insertion in
    multi-period live streams
    ([#1698](https://github.com/androidx/media/issues/1698)).
  * Parse `scte214:supplementalCodecs` attribute from DASH manifest to
    detect Dolby Vision formats
    ([#1785](https://github.com/androidx/media/pull/1785)).
  * Improve handling of period transitions in live streams where the period
    contains media samples beyond the declared period duration
    ([#1698](https://github.com/androidx/media/issues/1698)).
  * Fix issue where adaptation sets marked with `adaptation-set-switching`
    but different languages or role flags are merged together
    ([#2222](https://github.com/androidx/media/issues/2222)).
* Decoder extensions (FFmpeg, VP9, AV1, etc.):
  * Add the MPEG-H decoder module, which uses the native MPEG-H decoder
    module to decode MPEG-H audio
    ([#1826](https://github.com/androidx/media/pull/1826)).
* MIDI extension:
  * Plumb custom `AudioSink` and `AudioRendererEventListener` instances into
    `MidiRenderer`.
* Cast extension:
  * Bump the `play-services-cast-framework` dependency to 21.5.0 to fix a
    `FLAG_MUTABLE` crash in apps targeting API 34+ on devices with Google
    Play services installed but disabled
    ([#2178](https://github.com/androidx/media/issues/2178)).
* Demo app:
  * Add `MinimalControls` (`PlayPauseButton`, `NextButton`,
    `PreviousButton`) and `ExtraControls` (`RepeatButton`, `ShuffleButton`)
    Composable UI elements to `demo-compose` utilizing
    `PlayPauseButtonState`, `NextButtonState`, `PreviousButtonState`,
    `RepeatButtonState`, `ShuffleButtonState`.
  * Extend `demo-compose` with additional buttons and enhance
    `PlayerSurface` integration with scaling and shutter support.
* Remove deprecated symbols:
  * Remove deprecated `AudioMixer.create()` method. Use
    `DefaultAudioMixer.Factory().create()` instead.

@@ -391,6 +408,47 @@ This release includes the following changes since the

    `BaseGlShaderProgram` instead.
  * Remove `Transformer.flattenForSlowMotion`. Use
    `EditedMediaItem.flattenForSlowMotion` instead.
  * Removed `ExoPlayer.VideoComponent`, `ExoPlayer.AudioComponent`,
    `ExoPlayer.TextComponent` and `ExoPlayer.DeviceComponent`.
  * Removed `androidx.media3.exoplayer.audio.SonicAudioProcessor`.
  * Removed the following deprecated `DownloadHelper` methods:
    * Constructor `DownloadHelper(MediaItem, @Nullable MediaSource,
      TrackSelectionParameters, RendererCapabilities[])`, use
      `DownloadHelper(MediaItem, @Nullable MediaSource,
      TrackSelectionParameters, RendererCapabilitiesList)` instead.
    * `getRendererCapabilities(RenderersFactory)`; equivalent
      functionality can be achieved by creating a
      `DefaultRendererCapabilitiesList` with a `RenderersFactory` and
      calling `DefaultRendererCapabilitiesList.getRendererCapabilities()`.
  * Removed the
    `PlayerNotificationManager.setMediaSessionToken(MediaSessionCompat)`
    method. Use
    `PlayerNotificationManager.setMediaSessionToken(MediaSession.Token)` and
    pass in `(MediaSession.Token) compatToken.getToken()` instead (see the
    sketch after this section).

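A migration sketch for the removed `MediaSessionCompat` overload referenced
above, following the cast spelled out in the note. The compat token type is
assumed to be the `androidx.media` / support-library `MediaSessionCompat.Token`:

```kotlin
import android.media.session.MediaSession
import android.support.v4.media.session.MediaSessionCompat
import androidx.media3.ui.PlayerNotificationManager

// Unwrap the platform token from the compat token, as the note describes.
fun migrateSessionToken(
    manager: PlayerNotificationManager,
    compatToken: MediaSessionCompat.Token,
) {
  manager.setMediaSessionToken(compatToken.token as MediaSession.Token)
}
```
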
### 1.6.0-rc02 (2025-03-18)

Use the 1.6.0 [stable version](#160-2025-03-26).

### 1.6.0-rc01 (2025-03-12)

Use the 1.6.0 [stable version](#160-2025-03-26).

### 1.6.0-beta01 (2025-02-26)

Use the 1.6.0 [stable version](#160-2025-03-26).

### 1.6.0-alpha03 (2025-02-06)

Use the 1.6.0 [stable version](#160-2025-03-26).

### 1.6.0-alpha02 (2025-01-30)

Use the 1.6.0 [stable version](#160-2025-03-26).

### 1.6.0-alpha01 (2024-12-20)

Use the 1.6.0 [stable version](#160-2025-03-26).

## 1.5

@@ -704,19 +762,19 @@ This release includes the following changes since the

    [#184](https://github.com/androidx/media/issues/184)).
  * Fix bug where the "None" choice in the text selection does not work if
    there are app-defined text track selection preferences.
* DASH Extension:
* DASH extension:
  * Add support for periods starting in the middle of a segment
    ([#1440](https://github.com/androidx/media/issues/1440)).
* Smooth Streaming Extension:
* Smooth Streaming extension:
  * Fix a `Bad magic number for Bundle` error when playing SmoothStreaming
    streams with text tracks
    ([#1779](https://github.com/androidx/media/issues/1779)).
* RTSP Extension:
* RTSP extension:
  * Fix user info removal for URLs that contain encoded @ characters
    ([#1138](https://github.com/androidx/media/pull/1138)).
  * Fix crash when parsing RTP packets with header extensions
    ([#1225](https://github.com/androidx/media/pull/1225)).
* Decoder Extensions (FFmpeg, VP9, AV1, etc.):
* Decoder extensions (FFmpeg, VP9, AV1, etc.):
  * Add the IAMF decoder module, which provides support for playback of MP4
    files containing IAMF tracks using the libiamf native library to
    synthesize audio.

@@ -725,7 +783,7 @@ This release includes the following changes since the

    binaural playback support is currently not available.
  * Add 16 KB page support for decoder extensions on Android 15
    ([#1685](https://github.com/androidx/media/issues/1685)).
* Cast Extension:
* Cast extension:
  * Stop cleaning the timeline after the CastSession disconnects, which
    enables the sender app to resume playback locally after a disconnection.
  * Populate CastPlayer's `DeviceInfo` when a `Context` is provided. This

@@ -806,7 +864,7 @@ This release includes the following changes since the

    `MediaButtonReceiver` when deciding whether to ignore it to avoid a
    `ForegroundServiceDidNotStartInTimeException`
    ([#1581](https://github.com/androidx/media/issues/1581)).
* RTSP Extension:
* RTSP extension:
  * Skip invalid Media Descriptions in SDP parsing
    ([#1087](https://github.com/androidx/media/issues/1472)).

@@ -1151,12 +1209,12 @@ This release includes the following changes since the

    instances, which can eventually result in an app crashing with
    `IllegalStateException: Too many receivers, total of 1000, registered
    for pid` ([#1224](https://github.com/androidx/media/issues/1224)).
* Cronet Extension:
* Cronet extension:
  * Fix `SocketTimeoutException` in `CronetDataSource`. In some versions of
    Cronet, the request provided by the callback is not always the same.
    This leads to the callback not completing and the request timing out
    (https://issuetracker.google.com/328442628).
* HLS Extension:
* HLS extension:
  * Fix bug where pending EMSG samples waiting for a discontinuity were
    delegated in `HlsSampleStreamWrapper` with an incorrect offset, causing
    an `IndexOutOfBoundsException` or an `IllegalArgumentException`

@@ -1170,13 +1228,13 @@ This release includes the following changes since the

  * Fix bug where enabling CMCD for HLS live streams causes
    `ArrayIndexOutOfBoundsException`
    ([#1395](https://github.com/androidx/media/issues/1395)).
* DASH Extension:
* DASH extension:
  * Fix bug where re-preparing a multi-period live stream can throw an
    `IndexOutOfBoundsException`
    ([#1329](https://github.com/androidx/media/issues/1329)).
  * Add support for `dashif:Laurl` license URLs
    ([#1345](https://github.com/androidx/media/issues/1345)).
* Cast Extension:
* Cast extension:
  * Fix bug that converted the album title of the `MediaQueueItem` to the
    artist in the Media3 media item
    ([#1255](https://github.com/androidx/media/pull/1255)).

@@ -1324,13 +1382,13 @@ This release includes the following changes since the

  * Fall back to including the audio track language name if `Locale` cannot
    identify a display name
    ([#988](https://github.com/androidx/media/issues/988)).
* DASH Extension:
* DASH extension:
  * Populate all `Label` elements from the manifest into `Format.labels`
    ([#1054](https://github.com/androidx/media/pull/1054)).
* RTSP Extension:
* RTSP extension:
  * Skip empty session information values (i-tags) in SDP parsing
    ([#1087](https://github.com/androidx/media/issues/1087)).
* Decoder Extensions (FFmpeg, VP9, AV1, MIDI, etc.):
* Decoder extensions (FFmpeg, VP9, AV1, MIDI, etc.):
  * Disable the MIDI extension as a local dependency by default because it
    requires an additional Maven repository to be configured. Users who need
    this module from a local dependency

@@ -1483,12 +1541,12 @@ This release includes the following changes since the

    not transmitted between media controllers and sessions.
  * Add constructor to `MediaLibrarySession.Builder` that only takes a
    `Context` instead of a `MediaLibraryService`.
* HLS Extension:
* HLS extension:
  * Reduce `HlsMediaPeriod` to package-private visibility. This type
    shouldn't be directly depended on from outside the HLS package.
  * Resolve seeks to the beginning of a segment more efficiently
    ([#1031](https://github.com/androidx/media/pull/1031)).
* Decoder Extensions (FFmpeg, VP9, AV1, MIDI, etc.):
* Decoder extensions (FFmpeg, VP9, AV1, MIDI, etc.):
  * MIDI decoder: Ignore SysEx event messages
    ([#710](https://github.com/androidx/media/pull/710)).
* Test Utilities:

@@ -1586,16 +1644,16 @@ This release includes the following changes since the

  * Fix issue where the numbers in the fast forward button of the
    `PlayerControlView` were misaligned
    ([#547](https://github.com/androidx/media/issues/547)).
* DASH Extension:
* DASH extension:
  * Parse "f800" as channel count of 5 for Dolby in DASH manifest
    ([#688](https://github.com/androidx/media/issues/688)).
* Decoder Extensions (FFmpeg, VP9, AV1, MIDI, etc.):
* Decoder extensions (FFmpeg, VP9, AV1, MIDI, etc.):
  * MIDI: Fix issue where seeking forward skips the Program Change events
    ([#704](https://github.com/androidx/media/issues/704)).
  * Migrate to FFmpeg 6.0 and update supported NDK to `r26b`
    ([#707](https://github.com/androidx/media/pull/707),
    [#867](https://github.com/androidx/media/pull/867)).
* Cast Extension:
* Cast extension:
  * Sanitize creation of a `Timeline` to not crash the app when loading
    media fails on the cast device
    ([#708](https://github.com/androidx/media/issues/708)).

@@ -1833,11 +1891,11 @@ This release includes the following changes since the

    add `dataSync` as `foregroundServiceType` in the manifest and add the
    `FOREGROUND_SERVICE_DATA_SYNC` permission
    ([#11239](https://github.com/google/ExoPlayer/issues/11239)).
* HLS Extension:
* HLS extension:
  * Refresh the HLS live playlist with an interval calculated from the last
    load start time rather than the last load completed time
    ([#663](https://github.com/androidx/media/issues/663)).
* DASH Extension:
* DASH extension:
  * Allow multiple of the same DASH identifier in a segment template URL.
  * Add experimental support for parsing subtitles during extraction. This
    has better support for merging overlapping subtitles, including

@@ -1845,7 +1903,7 @@ This release includes the following changes since the

    can enable this using
    `DashMediaSource.Factory.experimentalParseSubtitlesDuringExtraction()`
    ([#288](https://github.com/androidx/media/issues/288)).
* RTSP Extension:
* RTSP extension:
  * Fix a race condition that could lead to `IndexOutOfBoundsException` when
    falling back to TCP, or playback hanging in some situations.
  * Check state in RTSP setup when returning loading state of

@@ -1856,7 +1914,7 @@ This release includes the following changes since the

  * Use RTSP Setup Response timeout value in time interval of sending
    keep-alive RTSP Options requests
    ([#662](https://github.com/androidx/media/issues/662)).
* Decoder Extensions (FFmpeg, VP9, AV1, MIDI, etc.):
* Decoder extensions (FFmpeg, VP9, AV1, MIDI, etc.):
  * Release the MIDI decoder module, which provides support for playback of
    standard MIDI files using the Jsyn library to synthesize audio.
  * Add `DecoderOutputBuffer.shouldBeSkipped` to directly mark output

@@ -2133,20 +2191,20 @@ This release contains the following changes since the

  * Add Util methods `shouldShowPlayButton` and
    `handlePlayPauseButtonAction` to write custom UI elements with a
    play/pause button.
* RTSP Extension:
* RTSP extension:
  * For MPEG4-LATM, use default profile-level-id value if absent in Describe
    Response SDP message
    ([#302](https://github.com/androidx/media/issues/302)).
  * Use base Uri for relative path resolution from the RTSP session if
    present in DESCRIBE response header
    ([#11160](https://github.com/google/ExoPlayer/issues/11160)).
* DASH Extension:
* DASH extension:
  * Remove the media time offset from `MediaLoadData.startTimeMs` and
    `MediaLoadData.endTimeMs` for multi-period DASH streams.
  * Fix a bug where re-preparing a multi-period live DASH media source
    produced an `IndexOutOfBoundsException`
    ([#10838](https://github.com/google/ExoPlayer/issues/10838)).
* HLS Extension:
* HLS extension:
  * Add
    `HlsMediaSource.Factory.setTimestampAdjusterInitializationTimeoutMs(long)`
    to set a timeout for the loading thread to wait for the

@@ -17,7 +17,7 @@ buildscript {

    mavenCentral()
  }
  dependencies {
    classpath 'com.android.tools.build:gradle:8.3.2'
    classpath 'com.android.tools.build:gradle:8.8.1'
    classpath 'com.google.android.gms:strict-version-matcher-plugin:1.2.4'
    classpath 'org.jetbrains.kotlin:kotlin-gradle-plugin:2.0.20'
    classpath 'org.jetbrains.kotlin:compose-compiler-gradle-plugin:2.0.20'

@@ -12,8 +12,8 @@

// See the License for the specific language governing permissions and
// limitations under the License.
project.ext {
  releaseVersion = '1.6.0-beta01'
  releaseVersionCode = 1_006_000_1_01
  releaseVersion = '1.6.0'
  releaseVersionCode = 1_006_000_3_00
  minSdkVersion = 21
  // See https://developer.android.com/training/cars/media/automotive-os#automotive-module
  automotiveMinSdkVersion = 28

@@ -30,6 +30,7 @@ internal fun ExtraControls(player: Player, modifier: Modifier = Modifier) {

    horizontalArrangement = Arrangement.Center,
    verticalAlignment = Alignment.CenterVertically,
  ) {
    PlaybackSpeedPopUpButton(player)
    ShuffleButton(player)
    RepeatButton(player)
  }

@@ -0,0 +1,108 @@

/*
 * Copyright 2025 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.media3.demo.compose.buttons

import android.view.Gravity
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.Arrangement
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.wrapContentSize
import androidx.compose.foundation.layout.wrapContentWidth
import androidx.compose.foundation.text.BasicText
import androidx.compose.material3.Text
import androidx.compose.material3.TextButton
import androidx.compose.runtime.Composable
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.runtime.setValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.platform.LocalView
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.window.Dialog
import androidx.compose.ui.window.DialogProperties
import androidx.compose.ui.window.DialogWindowProvider
import androidx.media3.common.Player
import androidx.media3.ui.compose.state.rememberPlaybackSpeedState

@Composable
internal fun PlaybackSpeedPopUpButton(
  player: Player,
  modifier: Modifier = Modifier,
  speedSelection: List<Float> = listOf(0.5f, 0.75f, 1.0f, 1.25f, 1.5f, 1.75f, 2.0f),
) {
  val state = rememberPlaybackSpeedState(player)
  var openDialog by remember { mutableStateOf(false) }
  TextButton(onClick = { openDialog = true }, modifier = modifier, enabled = state.isEnabled) {
    // TODO: look into TextMeasurer to ensure 1.1 and 2.2 occupy the same space
    BasicText("%.1fx".format(state.playbackSpeed))
  }
  if (openDialog) {
    BottomDialogOfChoices(
      currentSpeed = state.playbackSpeed,
      choices = speedSelection,
      onDismissRequest = { openDialog = false },
      onSelectChoice = state::updatePlaybackSpeed,
    )
  }
}

@Composable
private fun BottomDialogOfChoices(
  currentSpeed: Float,
  choices: List<Float>,
  onDismissRequest: () -> Unit,
  onSelectChoice: (Float) -> Unit,
) {
  Dialog(
    onDismissRequest = onDismissRequest,
    properties = DialogProperties(usePlatformDefaultWidth = false),
  ) {
    val dialogWindowProvider = LocalView.current.parent as? DialogWindowProvider
    dialogWindowProvider?.window?.let { window ->
      window.setGravity(Gravity.BOTTOM) // Move down, by default dialogs are in the centre
      window.setDimAmount(0f) // Remove dimmed background of ongoing playback
    }

    Box(modifier = Modifier.wrapContentSize().background(Color.LightGray)) {
      Column(
        modifier = Modifier.fillMaxWidth().wrapContentWidth(),
        verticalArrangement = Arrangement.Center,
        horizontalAlignment = Alignment.CenterHorizontally,
      ) {
        choices.forEach { speed ->
          TextButton(
            onClick = {
              onSelectChoice(speed)
              onDismissRequest()
            }
          ) {
            var fontWeight = FontWeight(400)
            if (speed == currentSpeed) {
              fontWeight = FontWeight(1000)
            }
            Text("%.1fx".format(speed), fontWeight = fontWeight)
          }
        }
      }
    }
  }
}

@@ -217,13 +217,11 @@ public final class CompositionPreviewActivity extends AppCompatActivity {

    String selectedResolutionHeight = String.valueOf(resolutionHeightSpinner.getSelectedItem());
    if (!SAME_AS_INPUT_OPTION.equals(selectedResolutionHeight)) {
      int resolutionHeight = Integer.parseInt(selectedResolutionHeight);
      videoEffectsBuilder.add(LanczosResample.scaleToFit(10000, resolutionHeight));
      videoEffectsBuilder.add(Presentation.createForHeight(resolutionHeight));
      videoEffectsBuilder.add(
          LanczosResample.scaleToFitWithFlexibleOrientation(10000, resolutionHeight));
      videoEffectsBuilder.add(Presentation.createForShortSide(resolutionHeight));
    }
    ImmutableList<Effect> videoEffects = videoEffectsBuilder.build();
    // Preview requires all sequences to be the same duration, so calculate main sequence duration
    // and limit background sequence duration to match.
    long videoSequenceDurationUs = 0;
    for (int i = 0; i < selectedMediaItems.length; i++) {
      if (selectedMediaItems[i]) {
        SonicAudioProcessor pitchChanger = new SonicAudioProcessor();

@@ -240,7 +238,6 @@ public final class CompositionPreviewActivity extends AppCompatActivity {

                /* audioProcessors= */ ImmutableList.of(pitchChanger),
                /* videoEffects= */ videoEffects))
            .setDurationUs(presetDurationsUs[i]);
        videoSequenceDurationUs += presetDurationsUs[i];
        mediaItems.add(itemBuilder.build());
      }
    }

@@ -248,7 +245,7 @@ public final class CompositionPreviewActivity extends AppCompatActivity {

    List<EditedMediaItemSequence> compositionSequences = new ArrayList<>();
    compositionSequences.add(videoSequence);
    if (includeBackgroundAudioTrack) {
      compositionSequences.add(getAudioBackgroundSequence(Util.usToMs(videoSequenceDurationUs)));
      compositionSequences.add(getAudioBackgroundSequence());
    }
    SonicAudioProcessor sampleRateChanger = new SonicAudioProcessor();
    sampleRateChanger.setOutputSampleRateHz(8_000);

@@ -264,19 +261,11 @@ public final class CompositionPreviewActivity extends AppCompatActivity {

        .build();
  }

  private EditedMediaItemSequence getAudioBackgroundSequence(long durationMs) {
    MediaItem audioMediaItem =
        new MediaItem.Builder()
            .setUri(AUDIO_URI)
            .setClippingConfiguration(
                new MediaItem.ClippingConfiguration.Builder()
                    .setStartPositionMs(0)
                    .setEndPositionMs(durationMs)
                    .build())
            .build();
  private static EditedMediaItemSequence getAudioBackgroundSequence() {
    MediaItem audioMediaItem = new MediaItem.Builder().setUri(AUDIO_URI).build();
    EditedMediaItem audioItem =
        new EditedMediaItem.Builder(audioMediaItem).setDurationUs(59_000_000).build();
    return new EditedMediaItemSequence.Builder(audioItem).build();
    return new EditedMediaItemSequence.Builder(audioItem).setIsLooping(true).build();
  }

  private void previewComposition() {

@@ -1,7 +1,10 @@
 # Effect demo

 This app demonstrates how to use the [Effect][] API to modify videos. It uses
-[setVideoEffects] method to add different effects to [ExoPlayer].
+`setVideoEffects` method to add different effects to [ExoPlayer][].

 See the [demos README](../README.md) for instructions on how to build and run
 this demo.
+
+[Effect]: https://github.com/androidx/media/tree/release/libraries/effect
+[ExoPlayer]: https://github.com/androidx/media/tree/release/libraries/exoplayer
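
A note on the API the README describes: the demo wires effects into playback with a single `setVideoEffects` call on the player. A minimal sketch of that pattern — the `Contrast` effect, URI, and class name here are illustrative, not taken from the demo itself:

```java
import android.content.Context;
import androidx.media3.common.Effect;
import androidx.media3.common.MediaItem;
import androidx.media3.effect.Contrast;
import androidx.media3.exoplayer.ExoPlayer;
import com.google.common.collect.ImmutableList;

final class EffectPlaybackSketch {
  static ExoPlayer startPlayback(Context context, String uri) {
    ExoPlayer player = new ExoPlayer.Builder(context).build();
    // Effects are applied to every video frame before rendering.
    player.setVideoEffects(ImmutableList.<Effect>of(new Contrast(/* contrast= */ 0.5f)));
    player.setMediaItem(MediaItem.fromUri(uri));
    player.prepare();
    player.play();
    return player;
  }
}
```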
@@ -257,6 +257,10 @@
+        {
+          "name": "Apple media playlist (AAC)",
+          "uri": "https://devstreaming-cdn.apple.com/videos/streaming/examples/bipbop_4x3/gear0/prog_index.m3u8"
+        },
         {
           "name": "Bitmovin (FMP4)",
           "uri": "https://bitdash-a.akamaihd.net/content/MI201109210084_1/m3u8s-fmp4/f08e80da-bf1d-4e3d-8899-f0f6155f6efa.m3u8"
         }
       ]
     },
@@ -168,6 +168,7 @@ public class DownloadTracker {
     private final DownloadHelper downloadHelper;
     private final MediaItem mediaItem;

+    private boolean tracksInfoAvailable;
     private TrackSelectionDialog trackSelectionDialog;
     private WidevineOfflineLicenseFetchTask widevineOfflineLicenseFetchTask;
     @Nullable private byte[] keySetId;
@@ -193,7 +194,8 @@ public class DownloadTracker {
     // DownloadHelper.Callback implementation.

     @Override
-    public void onPrepared(DownloadHelper helper) {
+    public void onPrepared(DownloadHelper helper, boolean tracksInfoAvailable) {
+      this.tracksInfoAvailable = tracksInfoAvailable;
       @Nullable Format format = getFirstFormatWithDrmInitData(helper);
       if (format == null) {
         onDownloadPrepared(helper);
@@ -237,6 +239,7 @@ public class DownloadTracker {

     @Override
     public void onTracksSelected(TrackSelectionParameters trackSelectionParameters) {
+      checkState(tracksInfoAvailable);
       for (int periodIndex = 0; periodIndex < downloadHelper.getPeriodCount(); periodIndex++) {
         downloadHelper.clearTrackSelections(periodIndex);
         downloadHelper.addTrackSelection(periodIndex, trackSelectionParameters);
@@ -265,6 +268,9 @@ public class DownloadTracker {
      */
    @Nullable
    private Format getFirstFormatWithDrmInitData(DownloadHelper helper) {
+      if (!tracksInfoAvailable) {
+        return null;
+      }
      for (int periodIndex = 0; periodIndex < helper.getPeriodCount(); periodIndex++) {
        MappedTrackInfo mappedTrackInfo = helper.getMappedTrackInfo(periodIndex);
        for (int rendererIndex = 0;
@@ -304,6 +310,13 @@ public class DownloadTracker {
         return;
       }

+      if (!tracksInfoAvailable) {
+        Log.d(TAG, "Tracks info is unavailable. Downloading entire stream.");
+        startDownload();
+        downloadHelper.release();
+        return;
+      }
+
       Tracks tracks = downloadHelper.getTracks(/* periodIndex= */ 0);
       if (!TrackSelectionDialog.willHaveContent(tracks)) {
         Log.d(TAG, "No dialog content. Downloading entire stream.");
@@ -61,7 +61,6 @@ import androidx.media3.datasource.DataSourceUtil;
 import androidx.media3.datasource.DataSpec;
 import androidx.media3.exoplayer.RenderersFactory;
 import androidx.media3.exoplayer.offline.DownloadService;
-import com.google.common.base.Objects;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import java.io.IOException;
@@ -74,6 +73,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.UUID;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -524,7 +524,7 @@ public class SampleChooserActivity extends AppCompatActivity

   private PlaylistGroup getGroup(String groupName, List<PlaylistGroup> groups) {
     for (int i = 0; i < groups.size(); i++) {
-      if (Objects.equal(groupName, groups.get(i).title)) {
+      if (Objects.equals(groupName, groups.get(i).title)) {
         return groups.get(i);
       }
     }
@@ -677,8 +677,8 @@ public final class TransformerActivity extends AppCompatActivity {
     int resolutionHeight =
         bundle.getInt(ConfigurationActivity.RESOLUTION_HEIGHT, /* defaultValue= */ C.LENGTH_UNSET);
     if (resolutionHeight != C.LENGTH_UNSET) {
-      effects.add(LanczosResample.scaleToFit(10000, resolutionHeight));
-      effects.add(Presentation.createForHeight(resolutionHeight));
+      effects.add(LanczosResample.scaleToFitWithFlexibleOrientation(10000, resolutionHeight));
+      effects.add(Presentation.createForShortSide(resolutionHeight));
     }

     return effects.build();
gradle/wrapper/gradle-wrapper.properties (vendored, 2 changes)
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
@@ -24,7 +24,7 @@ android {
 }

 dependencies {
-    api 'com.google.android.gms:play-services-cast-framework:21.3.0'
+    api 'com.google.android.gms:play-services-cast-framework:21.5.0'
     implementation 'androidx.annotation:annotation:' + androidxAnnotationVersion
     api project(modulePrefix + 'lib-common')
     compileOnly 'org.checkerframework:checker-qual:' + checkerframeworkVersion
@@ -16,6 +16,7 @@
 package androidx.media3.cast;

 import static androidx.media3.common.util.Assertions.checkArgument;
+import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Util.SDK_INT;
 import static androidx.media3.common.util.Util.castNonNull;
 import static java.lang.Math.min;
@@ -73,6 +74,7 @@ import com.google.android.gms.common.api.PendingResult;
 import com.google.android.gms.common.api.ResultCallback;
 import com.google.common.collect.ImmutableList;
 import java.util.List;
+import java.util.Objects;
 import org.checkerframework.checker.nullness.qual.RequiresNonNull;

 /**
@@ -165,6 +167,7 @@ public final class CastPlayer extends BasePlayer {
   private long pendingSeekPositionMs;
   @Nullable private PositionInfo pendingMediaItemRemovalPosition;
   private MediaMetadata mediaMetadata;
+  private MediaMetadata playlistMetadata;
   private DeviceInfo deviceInfo;

   /**
@@ -267,6 +270,7 @@ public final class CastPlayer extends BasePlayer {
     playbackState = STATE_IDLE;
     currentTimeline = CastTimeline.EMPTY_CAST_TIMELINE;
     mediaMetadata = MediaMetadata.EMPTY;
+    playlistMetadata = MediaMetadata.EMPTY;
     currentTracks = Tracks.EMPTY;
     availableCommands = new Commands.Builder().addAll(PERMANENT_AVAILABLE_COMMANDS).build();
     pendingSeekWindowIndex = C.INDEX_UNSET;
@@ -655,14 +659,19 @@ public final class CastPlayer extends BasePlayer {

   @Override
   public MediaMetadata getPlaylistMetadata() {
-    // CastPlayer does not currently support metadata.
-    return MediaMetadata.EMPTY;
+    return playlistMetadata;
   }

-  /** This method is not supported and does nothing. */
   @Override
-  public void setPlaylistMetadata(MediaMetadata mediaMetadata) {
-    // CastPlayer does not currently support metadata.
+  public void setPlaylistMetadata(MediaMetadata playlistMetadata) {
+    checkNotNull(playlistMetadata);
+    if (playlistMetadata.equals(this.playlistMetadata)) {
+      return;
+    }
+    this.playlistMetadata = playlistMetadata;
+    listeners.sendEvent(
+        EVENT_PLAYLIST_METADATA_CHANGED,
+        listener -> listener.onPlaylistMetadataChanged(this.playlistMetadata));
   }

   @Override
@@ -909,7 +918,7 @@ public final class CastPlayer extends BasePlayer {
         ? currentTimeline.getPeriod(currentWindowIndex, period, /* setIds= */ true).uid
         : null;
     if (!playingPeriodChangedByTimelineChange
-        && !Util.areEqual(oldPeriodUid, currentPeriodUid)
+        && !Objects.equals(oldPeriodUid, currentPeriodUid)
         && pendingSeekCount == 0) {
       // Report discontinuity and media item auto transition.
       currentTimeline.getPeriod(oldWindowIndex, period, /* setIds= */ true);
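
The CastPlayer changes above turn `setPlaylistMetadata` from a documented no-op into a real setter that de-duplicates equal values and notifies listeners. A minimal usage sketch (the title string is illustrative):

```java
import androidx.media3.cast.CastPlayer;
import androidx.media3.common.MediaMetadata;
import androidx.media3.common.Player;

final class PlaylistMetadataSketch {
  static void setAndObserve(CastPlayer castPlayer) {
    castPlayer.addListener(
        new Player.Listener() {
          @Override
          public void onPlaylistMetadataChanged(MediaMetadata playlistMetadata) {
            // Fires once per distinct value; setting equal metadata again is a no-op.
          }
        });
    castPlayer.setPlaylistMetadata(new MediaMetadata.Builder().setTitle("My queue").build());
  }
}
```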
@@ -1800,7 +1800,7 @@ public class CastPlayerTest {
   }

   @Test
-  public void setMediaItems_doesNotifyOnMetadataChanged() {
+  public void setMediaItems_doesNotifyOnMediaMetadataChanged() {
     when(mockRemoteMediaClient.queueJumpToItem(anyInt(), anyLong(), eq(null)))
         .thenReturn(mockPendingResult);
     ArgumentCaptor<MediaMetadata> metadataCaptor = ArgumentCaptor.forClass(MediaMetadata.class);
@@ -1827,7 +1827,7 @@ public class CastPlayerTest {
             .build());
     castPlayer.addListener(mockListener);

-    MediaMetadata intitalMetadata = castPlayer.getMediaMetadata();
+    MediaMetadata initialMetadata = castPlayer.getMediaMetadata();
     castPlayer.setMediaItems(firstPlaylist, /* startIndex= */ 0, /* startPositionMs= */ 2000L);
     updateTimeLine(firstPlaylist, /* mediaQueueItemIds= */ new int[] {1}, /* currentItemId= */ 1);
     MediaMetadata firstMetadata = castPlayer.getMediaMetadata();
@@ -1850,7 +1850,7 @@ public class CastPlayerTest {
             secondPlaylist.get(1).mediaMetadata,
             secondPlaylist.get(0).mediaMetadata)
         .inOrder();
-    assertThat(intitalMetadata).isEqualTo(MediaMetadata.EMPTY);
+    assertThat(initialMetadata).isEqualTo(MediaMetadata.EMPTY);
     assertThat(ImmutableList.of(firstMetadata, secondMetadata, thirdMetadata))
         .containsExactly(
             firstPlaylist.get(0).mediaMetadata,
@@ -1898,6 +1898,35 @@ public class CastPlayerTest {
     verify(mockListener, never()).onMediaMetadataChanged(any());
   }

+  @Test
+  public void setPlaylistMetadata_doesNotifyOnPlaylistMetadataChanged() {
+    castPlayer.addListener(mockListener);
+
+    MediaMetadata metadata = new MediaMetadata.Builder().setArtist("foo").build();
+
+    assertThat(castPlayer.getPlaylistMetadata()).isEqualTo(MediaMetadata.EMPTY);
+
+    castPlayer.setPlaylistMetadata(metadata);
+
+    assertThat(castPlayer.getPlaylistMetadata()).isEqualTo(metadata);
+
+    verify(mockListener).onPlaylistMetadataChanged(metadata);
+  }
+
+  @Test
+  public void setPlaylistMetadata_equalMetadata_doesNotNotifyOnPlaylistMetadataChanged() {
+    castPlayer.addListener(mockListener);
+
+    MediaMetadata metadata = new MediaMetadata.Builder().setArtist("foo").build();
+
+    castPlayer.setPlaylistMetadata(metadata);
+    castPlayer.setPlaylistMetadata(metadata);
+
+    assertThat(castPlayer.getPlaylistMetadata()).isEqualTo(metadata);
+
+    verify(mockListener, times(1)).onPlaylistMetadataChanged(metadata);
+  }
+
   @Test
   public void getDeviceInfo_returnsCorrectDeviceInfoWithPlaybackTypeRemote() {
     DeviceInfo deviceInfo = castPlayer.getDeviceInfo();
@@ -1149,6 +1149,27 @@ public final class AdPlaybackState {
     }
   }

+  /**
+   * Returns an instance with ad groups removed until and excluding the first post roll ad group or
+   * the first ad group with {@link AdGroup#timeUs} larger than the given time, in microseconds.
+   *
+   * <p>Any ad group with {@link AdGroup#timeUs} set to {@link C#TIME_END_OF_SOURCE} is considered a
+   * post roll ad group.
+   */
+  @CheckResult
+  public AdPlaybackState withRemovedAdGroupCountBefore(long timeUs) {
+    int newRemovedAdGroupCount;
+    for (newRemovedAdGroupCount = removedAdGroupCount;
+        newRemovedAdGroupCount < adGroupCount;
+        newRemovedAdGroupCount++) {
+      AdGroup adGroup = getAdGroup(newRemovedAdGroupCount);
+      if (timeUs <= adGroup.timeUs || adGroup.timeUs == C.TIME_END_OF_SOURCE) {
+        break;
+      }
+    }
+    return withRemovedAdGroupCount(newRemovedAdGroupCount);
+  }
+
   /**
    * Returns an instance with the specified {@link AdGroup#contentResumeOffsetUs}, in microseconds,
    * for the specified ad group.
@@ -1365,7 +1386,7 @@ public final class AdPlaybackState {
       return false;
     }
     AdPlaybackState that = (AdPlaybackState) o;
-    return Util.areEqual(adsId, that.adsId)
+    return Objects.equals(adsId, that.adsId)
         && adGroupCount == that.adGroupCount
         && adResumePositionUs == that.adResumePositionUs
         && contentDurationUs == that.contentDurationUs
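
The loop in `withRemovedAdGroupCountBefore` stops at the first ad group at or past the given time, or at a post-roll, so everything strictly before that point is discarded. A hedged usage sketch — the ads id and timestamps below are made up:

```java
import androidx.media3.common.AdPlaybackState;
import androidx.media3.common.C;

final class AdGroupTrimSketch {
  static AdPlaybackState trimmed() {
    AdPlaybackState state =
        new AdPlaybackState(
            /* adsId= */ "ad-tag", /* adGroupTimesUs...= */ 0, 10_000_000, C.TIME_END_OF_SOURCE);
    // Removes the group at 0s; keeps the group at 10s and the post-roll.
    return state.withRemovedAdGroupCountBefore(/* timeUs= */ 5_000_000);
  }
}
```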
@@ -151,15 +151,6 @@ public abstract class BasePlayer implements Player {
     return getPreviousMediaItemIndex() != C.INDEX_UNSET;
   }

-  /**
-   * @deprecated Use {@link #seekToPreviousMediaItem()} instead.
-   */
-  @Deprecated
-  @Override
-  public final void seekToPreviousWindow() {
-    seekToPreviousMediaItem();
-  }
-
   @Override
   public final void seekToPreviousMediaItem() {
     seekToPreviousMediaItemInternal(Player.COMMAND_SEEK_TO_PREVIOUS_MEDIA_ITEM);
@@ -186,47 +177,11 @@ public abstract class BasePlayer implements Player {
     }
   }

-  /**
-   * @deprecated Use {@link #hasNextMediaItem()} instead.
-   */
-  @Deprecated
-  @Override
-  public final boolean hasNext() {
-    return hasNextMediaItem();
-  }
-
-  /**
-   * @deprecated Use {@link #hasNextMediaItem()} instead.
-   */
-  @Deprecated
-  @Override
-  public final boolean hasNextWindow() {
-    return hasNextMediaItem();
-  }
-
   @Override
   public final boolean hasNextMediaItem() {
     return getNextMediaItemIndex() != C.INDEX_UNSET;
   }

-  /**
-   * @deprecated Use {@link #seekToNextMediaItem()} instead.
-   */
-  @Deprecated
-  @Override
-  public final void next() {
-    seekToNextMediaItem();
-  }
-
-  /**
-   * @deprecated Use {@link #seekToNextMediaItem()} instead.
-   */
-  @Deprecated
-  @Override
-  public final void seekToNextWindow() {
-    seekToNextMediaItem();
-  }
-
   @Override
   public final void seekToNextMediaItem() {
     seekToNextMediaItemInternal(Player.COMMAND_SEEK_TO_NEXT_MEDIA_ITEM);
@@ -30,6 +30,7 @@ import java.lang.annotation.Documented;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
+import java.util.Objects;

 /** Information about the playback device. */
 public final class DeviceInfo {
@@ -178,7 +179,7 @@ public final class DeviceInfo {
     return playbackType == other.playbackType
         && minVolume == other.minVolume
         && maxVolume == other.maxVolume
-        && Util.areEqual(routingControllerId, other.routingControllerId);
+        && Objects.equals(routingControllerId, other.routingControllerId);
   }

   @Override
@@ -227,5 +228,4 @@ public final class DeviceInfo {
         .setRoutingControllerId(routingControllerId)
         .build();
   }
-  ;
 }
@@ -28,6 +28,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Objects;
 import java.util.UUID;

 /** Initialization data for one or more DRM schemes. */
@@ -160,7 +161,7 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
    */
   @CheckResult
   public DrmInitData copyWithSchemeType(@Nullable String schemeType) {
-    if (Util.areEqual(this.schemeType, schemeType)) {
+    if (Objects.equals(this.schemeType, schemeType)) {
       return this;
     }
     return new DrmInitData(schemeType, false, schemeDatas);
@@ -204,7 +205,7 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
       return false;
     }
     DrmInitData other = (DrmInitData) obj;
-    return Util.areEqual(schemeType, other.schemeType)
+    return Objects.equals(schemeType, other.schemeType)
         && Arrays.equals(schemeDatas, other.schemeDatas);
   }

@@ -352,9 +353,9 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
         return true;
       }
       SchemeData other = (SchemeData) obj;
-      return Util.areEqual(licenseServerUrl, other.licenseServerUrl)
-          && Util.areEqual(mimeType, other.mimeType)
-          && Util.areEqual(uuid, other.uuid)
+      return Objects.equals(licenseServerUrl, other.licenseServerUrl)
+          && Objects.equals(mimeType, other.mimeType)
+          && Objects.equals(uuid, other.uuid)
           && Arrays.equals(data, other.data);
     }
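
This file shows the pattern repeated across most of the commits in this range: Guava's `Objects.equal` and media3's `Util.areEqual` are swapped for the JDK's `java.util.Objects.equals`, which has identical null-safe semantics. For the record:

```java
import java.util.Objects;

final class NullSafeEqualsSketch {
  static boolean sameSemantics(String a, String b) {
    // Objects.equals returns true when a == b (including both null) or a.equals(b);
    // Util.areEqual was a stand-in for this before the JDK method was usable.
    boolean viaJdk = Objects.equals(a, b);
    boolean manual = (a == null) ? (b == null) : a.equals(b);
    return viaJdk == manual; // always true
  }
}
```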
@@ -1039,7 +1039,10 @@ public final class Format {
   /** The audio sampling rate in Hz, or {@link #NO_VALUE} if unknown or not applicable. */
   public final int sampleRate;

-  /** The {@link C.PcmEncoding} for PCM audio. Set to {@link #NO_VALUE} for other media types. */
+  /**
+   * The {@link C.PcmEncoding} for PCM or losslessly compressed audio. Set to {@link #NO_VALUE} for
+   * other media types.
+   */
   @UnstableApi public final @C.PcmEncoding int pcmEncoding;

   /**
@@ -352,18 +352,6 @@ public class ForwardingPlayer implements Player {
     return player.hasPreviousMediaItem();
   }

-  /**
-   * Calls {@link Player#seekToPreviousWindow()} on the delegate.
-   *
-   * @deprecated Use {@link #seekToPreviousMediaItem()} instead.
-   */
-  @SuppressWarnings("deprecation") // Forwarding to deprecated method
-  @Deprecated
-  @Override
-  public void seekToPreviousWindow() {
-    player.seekToPreviousWindow();
-  }
-
   /** Calls {@link Player#seekToPreviousMediaItem()} on the delegate. */
   @Override
   public void seekToPreviousMediaItem() {
@@ -382,60 +370,12 @@ public class ForwardingPlayer implements Player {
     return player.getMaxSeekToPreviousPosition();
   }

-  /**
-   * Calls {@link Player#hasNext()} on the delegate and returns the result.
-   *
-   * @deprecated Use {@link #hasNextMediaItem()} instead.
-   */
-  @SuppressWarnings("deprecation") // Forwarding to deprecated method
-  @Deprecated
-  @Override
-  public boolean hasNext() {
-    return player.hasNext();
-  }
-
-  /**
-   * Calls {@link Player#hasNextWindow()} on the delegate and returns the result.
-   *
-   * @deprecated Use {@link #hasNextMediaItem()} instead.
-   */
-  @SuppressWarnings("deprecation") // Forwarding to deprecated method
-  @Deprecated
-  @Override
-  public boolean hasNextWindow() {
-    return player.hasNextWindow();
-  }
-
   /** Calls {@link Player#hasNextMediaItem()} on the delegate and returns the result. */
   @Override
   public boolean hasNextMediaItem() {
     return player.hasNextMediaItem();
   }

-  /**
-   * Calls {@link Player#next()} on the delegate.
-   *
-   * @deprecated Use {@link #seekToNextMediaItem()} instead.
-   */
-  @SuppressWarnings("deprecation") // Forwarding to deprecated method
-  @Deprecated
-  @Override
-  public void next() {
-    player.next();
-  }
-
-  /**
-   * Calls {@link Player#seekToNextWindow()} on the delegate.
-   *
-   * @deprecated Use {@link #seekToNextMediaItem()} instead.
-   */
-  @SuppressWarnings("deprecation") // Forwarding to deprecated method
-  @Deprecated
-  @Override
-  public void seekToNextWindow() {
-    player.seekToNextWindow();
-  }
-
   /** Calls {@link Player#seekToNextMediaItem()} on the delegate. */
   @Override
   public void seekToNextMediaItem() {
@@ -21,7 +21,7 @@ import android.os.Bundle;
 import androidx.annotation.Nullable;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
-import com.google.common.base.Objects;
+import java.util.Objects;

 /**
  * A rating expressed as "heart" or "no heart". It can be used to indicate whether the content is a
@@ -60,7 +60,7 @@ public final class HeartRating extends Rating {

   @Override
   public int hashCode() {
-    return Objects.hashCode(rated, isHeart);
+    return Objects.hash(rated, isHeart);
   }

   @Override
@@ -21,6 +21,7 @@ import android.os.Bundle;
 import androidx.annotation.Nullable;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
+import java.util.Objects;

 /** A label for a {@link Format}. */
 @UnstableApi
@@ -55,7 +56,7 @@ public class Label {
       return false;
     }
     Label label = (Label) o;
-    return Util.areEqual(language, label.language) && Util.areEqual(value, label.value);
+    return Objects.equals(language, label.language) && Objects.equals(value, label.value);
   }

   @Override
@@ -39,6 +39,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.UUID;

 /** Representation of a media item. */
@@ -915,8 +916,8 @@ public final class MediaItem {

       DrmConfiguration other = (DrmConfiguration) obj;
       return scheme.equals(other.scheme)
-          && Util.areEqual(licenseUri, other.licenseUri)
-          && Util.areEqual(licenseRequestHeaders, other.licenseRequestHeaders)
+          && Objects.equals(licenseUri, other.licenseUri)
+          && Objects.equals(licenseRequestHeaders, other.licenseRequestHeaders)
           && multiSession == other.multiSession
          && forceDefaultLicenseUri == other.forceDefaultLicenseUri
          && playClearContentWithoutKey == other.playClearContentWithoutKey
@@ -1090,7 +1091,7 @@ public final class MediaItem {
     }

     AdsConfiguration other = (AdsConfiguration) obj;
-    return adTagUri.equals(other.adTagUri) && Util.areEqual(adsId, other.adsId);
+    return adTagUri.equals(other.adTagUri) && Objects.equals(adsId, other.adsId);
   }

   @Override
@@ -1209,14 +1210,14 @@ public final class MediaItem {
       LocalConfiguration other = (LocalConfiguration) obj;

       return uri.equals(other.uri)
-          && Util.areEqual(mimeType, other.mimeType)
-          && Util.areEqual(drmConfiguration, other.drmConfiguration)
-          && Util.areEqual(adsConfiguration, other.adsConfiguration)
+          && Objects.equals(mimeType, other.mimeType)
+          && Objects.equals(drmConfiguration, other.drmConfiguration)
+          && Objects.equals(adsConfiguration, other.adsConfiguration)
           && streamKeys.equals(other.streamKeys)
-          && Util.areEqual(customCacheKey, other.customCacheKey)
+          && Objects.equals(customCacheKey, other.customCacheKey)
           && subtitleConfigurations.equals(other.subtitleConfigurations)
-          && Util.areEqual(tag, other.tag)
-          && Util.areEqual(imageDurationMs, other.imageDurationMs);
+          && Objects.equals(tag, other.tag)
+          && imageDurationMs == other.imageDurationMs;
     }

     @Override
@@ -1714,12 +1715,12 @@ public final class MediaItem {
       SubtitleConfiguration other = (SubtitleConfiguration) obj;

       return uri.equals(other.uri)
-          && Util.areEqual(mimeType, other.mimeType)
-          && Util.areEqual(language, other.language)
+          && Objects.equals(mimeType, other.mimeType)
+          && Objects.equals(language, other.language)
           && selectionFlags == other.selectionFlags
           && roleFlags == other.roleFlags
-          && Util.areEqual(label, other.label)
-          && Util.areEqual(id, other.id);
+          && Objects.equals(label, other.label)
+          && Objects.equals(id, other.id);
     }

     @Override
@@ -1852,6 +1853,7 @@ public final class MediaItem {
       private boolean relativeToLiveWindow;
       private boolean relativeToDefaultPosition;
       private boolean startsAtKeyFrame;
+      private boolean allowUnseekableMedia;

       /** Creates a new instance with default values. */
       public Builder() {
@@ -1864,6 +1866,7 @@ public final class MediaItem {
         relativeToLiveWindow = clippingConfiguration.relativeToLiveWindow;
         relativeToDefaultPosition = clippingConfiguration.relativeToDefaultPosition;
         startsAtKeyFrame = clippingConfiguration.startsAtKeyFrame;
+        allowUnseekableMedia = clippingConfiguration.allowUnseekableMedia;
       }

       /**
@@ -1941,6 +1944,21 @@ public final class MediaItem {
         return this;
       }

+      /**
+       * Sets whether clipping to a non-zero start position in unseekable media is allowed (Default:
+       * {@code false}).
+       *
+       * <p>Note that this could be inefficient because a player needs to read and decode all
+       * samples from the beginning of the file and it should only be used if the clip start
+       * position is small and the entire data before the start position fits into memory.
+       */
+      @UnstableApi
+      @CanIgnoreReturnValue
+      public Builder setAllowUnseekableMedia(boolean allowUnseekableMedia) {
+        this.allowUnseekableMedia = allowUnseekableMedia;
+        return this;
+      }
+
       /**
        * Returns a {@link ClippingConfiguration} instance initialized with the values of this
        * builder.
@@ -1993,9 +2011,12 @@ public final class MediaItem {
      */
     public final boolean relativeToDefaultPosition;

-    /** Sets whether the start point is guaranteed to be a key frame. */
+    /** Whether the start point is guaranteed to be a key frame. */
     public final boolean startsAtKeyFrame;

+    /** Whether clipping to a non-zero start position in unseekable media is allowed. */
+    @UnstableApi public final boolean allowUnseekableMedia;
+
     private ClippingConfiguration(Builder builder) {
       this.startPositionMs = usToMs(builder.startPositionUs);
       this.endPositionMs = usToMs(builder.endPositionUs);
@@ -2004,6 +2025,7 @@ public final class MediaItem {
       this.relativeToLiveWindow = builder.relativeToLiveWindow;
       this.relativeToDefaultPosition = builder.relativeToDefaultPosition;
       this.startsAtKeyFrame = builder.startsAtKeyFrame;
+      this.allowUnseekableMedia = builder.allowUnseekableMedia;
     }

     /** Returns a {@link Builder} initialized with the values of this instance. */
@@ -2026,7 +2048,8 @@ public final class MediaItem {
           && endPositionUs == other.endPositionUs
           && relativeToLiveWindow == other.relativeToLiveWindow
           && relativeToDefaultPosition == other.relativeToDefaultPosition
-          && startsAtKeyFrame == other.startsAtKeyFrame;
+          && startsAtKeyFrame == other.startsAtKeyFrame
+          && allowUnseekableMedia == other.allowUnseekableMedia;
     }

     @Override
@@ -2036,6 +2059,7 @@ public final class MediaItem {
       result = 31 * result + (relativeToLiveWindow ? 1 : 0);
       result = 31 * result + (relativeToDefaultPosition ? 1 : 0);
       result = 31 * result + (startsAtKeyFrame ? 1 : 0);
+      result = 31 * result + (allowUnseekableMedia ? 1 : 0);
       return result;
     }

@@ -2044,8 +2068,9 @@ public final class MediaItem {
     private static final String FIELD_RELATIVE_TO_LIVE_WINDOW = Util.intToStringMaxRadix(2);
     private static final String FIELD_RELATIVE_TO_DEFAULT_POSITION = Util.intToStringMaxRadix(3);
     private static final String FIELD_STARTS_AT_KEY_FRAME = Util.intToStringMaxRadix(4);
-    static final String FIELD_START_POSITION_US = Util.intToStringMaxRadix(5);
-    static final String FIELD_END_POSITION_US = Util.intToStringMaxRadix(6);
+    @VisibleForTesting static final String FIELD_START_POSITION_US = Util.intToStringMaxRadix(5);
+    @VisibleForTesting static final String FIELD_END_POSITION_US = Util.intToStringMaxRadix(6);
+    private static final String FIELD_ALLOW_UNSEEKABLE_MEDIA = Util.intToStringMaxRadix(7);

     @UnstableApi
     public Bundle toBundle() {
@@ -2071,6 +2096,9 @@ public final class MediaItem {
       if (startsAtKeyFrame != UNSET.startsAtKeyFrame) {
         bundle.putBoolean(FIELD_STARTS_AT_KEY_FRAME, startsAtKeyFrame);
       }
+      if (allowUnseekableMedia != UNSET.allowUnseekableMedia) {
+        bundle.putBoolean(FIELD_ALLOW_UNSEEKABLE_MEDIA, allowUnseekableMedia);
+      }
       return bundle;
     }

@@ -2095,7 +2123,11 @@ public final class MediaItem {
                   /* defaultValue= */ UNSET.relativeToDefaultPosition))
           .setStartsAtKeyFrame(
               bundle.getBoolean(
-                  FIELD_STARTS_AT_KEY_FRAME, /* defaultValue= */ UNSET.startsAtKeyFrame));
+                  FIELD_STARTS_AT_KEY_FRAME, /* defaultValue= */ UNSET.startsAtKeyFrame))
+          .setAllowUnseekableMedia(
+              bundle.getBoolean(
+                  FIELD_ALLOW_UNSEEKABLE_MEDIA,
+                  /* defaultValue= */ UNSET.allowUnseekableMedia));
       long startPositionUs =
           bundle.getLong(FIELD_START_POSITION_US, /* defaultValue= */ UNSET.startPositionUs);
       if (startPositionUs != UNSET.startPositionUs) {
@@ -2216,8 +2248,8 @@ public final class MediaItem {
       return false;
     }
     RequestMetadata that = (RequestMetadata) o;
-    return Util.areEqual(mediaUri, that.mediaUri)
-        && Util.areEqual(searchQuery, that.searchQuery)
+    return Objects.equals(mediaUri, that.mediaUri)
+        && Objects.equals(searchQuery, that.searchQuery)
         && ((extras == null) == (that.extras == null));
   }

@@ -2337,12 +2369,12 @@ public final class MediaItem {

     MediaItem other = (MediaItem) obj;

-    return Util.areEqual(mediaId, other.mediaId)
+    return Objects.equals(mediaId, other.mediaId)
         && clippingConfiguration.equals(other.clippingConfiguration)
-        && Util.areEqual(localConfiguration, other.localConfiguration)
-        && Util.areEqual(liveConfiguration, other.liveConfiguration)
-        && Util.areEqual(mediaMetadata, other.mediaMetadata)
-        && Util.areEqual(requestMetadata, other.requestMetadata);
+        && Objects.equals(localConfiguration, other.localConfiguration)
+        && Objects.equals(liveConfiguration, other.liveConfiguration)
+        && Objects.equals(mediaMetadata, other.mediaMetadata)
+        && Objects.equals(requestMetadata, other.requestMetadata);
   }

   @Override
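
The new `ClippingConfiguration.Builder#setAllowUnseekableMedia` flag above opts a clipped item into decode-from-start behaviour. A sketch of how it would be used — the URI and clip times are illustrative:

```java
import androidx.media3.common.MediaItem;

final class UnseekableClipSketch {
  static MediaItem clipped() {
    return new MediaItem.Builder()
        .setUri("https://example.com/unseekable-stream.mp3")
        .setClippingConfiguration(
            new MediaItem.ClippingConfiguration.Builder()
                // Per the javadoc above: only sensible for small start positions,
                // since all samples before the start point are read and decoded.
                .setStartPositionMs(3_000)
                .setEndPositionMs(8_000)
                .setAllowUnseekableMedia(true)
                .build())
        .build();
  }
}
```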
@@ -29,11 +29,11 @@ public final class MediaLibraryInfo {

   /** The version of the library expressed as a string, for example "1.2.3" or "1.2.0-beta01". */
   // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa.
-  public static final String VERSION = "1.6.0-beta01";
+  public static final String VERSION = "1.6.0";

   /** The version of the library expressed as {@code TAG + "/" + VERSION}. */
   // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
-  public static final String VERSION_SLASHY = "AndroidXMedia3/1.6.0-beta01";
+  public static final String VERSION_SLASHY = "AndroidXMedia3/1.6.0";

   /**
    * The version of the library expressed as an integer, for example 1002003300.
@@ -47,7 +47,7 @@ public final class MediaLibraryInfo {
    * (123-045-006-3-00).
    */
   // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
-  public static final int VERSION_INT = 1_006_000_1_01;
+  public static final int VERSION_INT = 1_006_000_3_00;

   /** Whether the library was compiled with {@link Assertions} checks enabled. */
   public static final boolean ASSERTIONS_ENABLED = true;
@@ -29,7 +29,6 @@ import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
-import com.google.common.base.Objects;
 import com.google.common.collect.ImmutableList;
 import com.google.errorprone.annotations.CanIgnoreReturnValue;
 import java.lang.annotation.Documented;
@@ -39,6 +38,7 @@ import java.lang.annotation.Target;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Objects;

 /**
  * Metadata of a {@link MediaItem}, playlist, or a combination of multiple sources of {@link
@@ -249,8 +249,8 @@ public final class MediaMetadata {
     @CanIgnoreReturnValue
     public Builder maybeSetArtworkData(byte[] artworkData, @PictureType int artworkDataType) {
       if (this.artworkData == null
-          || Util.areEqual(artworkDataType, PICTURE_TYPE_FRONT_COVER)
-          || !Util.areEqual(this.artworkDataType, PICTURE_TYPE_FRONT_COVER)) {
+          || artworkDataType == PICTURE_TYPE_FRONT_COVER
+          || !Objects.equals(this.artworkDataType, PICTURE_TYPE_FRONT_COVER)) {
         this.artworkData = artworkData.clone();
         this.artworkDataType = artworkDataType;
       }
@@ -1221,47 +1221,47 @@ public final class MediaMetadata {
       return false;
     }
     MediaMetadata that = (MediaMetadata) obj;
-    return Util.areEqual(title, that.title)
-        && Util.areEqual(artist, that.artist)
-        && Util.areEqual(albumTitle, that.albumTitle)
-        && Util.areEqual(albumArtist, that.albumArtist)
-        && Util.areEqual(displayTitle, that.displayTitle)
-        && Util.areEqual(subtitle, that.subtitle)
-        && Util.areEqual(description, that.description)
-        && Util.areEqual(durationMs, that.durationMs)
-        && Util.areEqual(userRating, that.userRating)
-        && Util.areEqual(overallRating, that.overallRating)
+    return Objects.equals(title, that.title)
+        && Objects.equals(artist, that.artist)
+        && Objects.equals(albumTitle, that.albumTitle)
+        && Objects.equals(albumArtist, that.albumArtist)
+        && Objects.equals(displayTitle, that.displayTitle)
+        && Objects.equals(subtitle, that.subtitle)
+        && Objects.equals(description, that.description)
+        && Objects.equals(durationMs, that.durationMs)
+        && Objects.equals(userRating, that.userRating)
+        && Objects.equals(overallRating, that.overallRating)
         && Arrays.equals(artworkData, that.artworkData)
-        && Util.areEqual(artworkDataType, that.artworkDataType)
-        && Util.areEqual(artworkUri, that.artworkUri)
-        && Util.areEqual(trackNumber, that.trackNumber)
-        && Util.areEqual(totalTrackCount, that.totalTrackCount)
-        && Util.areEqual(folderType, that.folderType)
-        && Util.areEqual(isBrowsable, that.isBrowsable)
-        && Util.areEqual(isPlayable, that.isPlayable)
-        && Util.areEqual(recordingYear, that.recordingYear)
-        && Util.areEqual(recordingMonth, that.recordingMonth)
-        && Util.areEqual(recordingDay, that.recordingDay)
-        && Util.areEqual(releaseYear, that.releaseYear)
-        && Util.areEqual(releaseMonth, that.releaseMonth)
-        && Util.areEqual(releaseDay, that.releaseDay)
-        && Util.areEqual(writer, that.writer)
-        && Util.areEqual(composer, that.composer)
-        && Util.areEqual(conductor, that.conductor)
-        && Util.areEqual(discNumber, that.discNumber)
-        && Util.areEqual(totalDiscCount, that.totalDiscCount)
-        && Util.areEqual(genre, that.genre)
-        && Util.areEqual(compilation, that.compilation)
-        && Util.areEqual(station, that.station)
-        && Util.areEqual(mediaType, that.mediaType)
-        && Util.areEqual(supportedCommands, that.supportedCommands)
+        && Objects.equals(artworkDataType, that.artworkDataType)
+        && Objects.equals(artworkUri, that.artworkUri)
+        && Objects.equals(trackNumber, that.trackNumber)
+        && Objects.equals(totalTrackCount, that.totalTrackCount)
+        && Objects.equals(folderType, that.folderType)
+        && Objects.equals(isBrowsable, that.isBrowsable)
+        && Objects.equals(isPlayable, that.isPlayable)
+        && Objects.equals(recordingYear, that.recordingYear)
+        && Objects.equals(recordingMonth, that.recordingMonth)
+        && Objects.equals(recordingDay, that.recordingDay)
+        && Objects.equals(releaseYear, that.releaseYear)
+        && Objects.equals(releaseMonth, that.releaseMonth)
+        && Objects.equals(releaseDay, that.releaseDay)
+        && Objects.equals(writer, that.writer)
+        && Objects.equals(composer, that.composer)
+        && Objects.equals(conductor, that.conductor)
+        && Objects.equals(discNumber, that.discNumber)
+        && Objects.equals(totalDiscCount, that.totalDiscCount)
+        && Objects.equals(genre, that.genre)
+        && Objects.equals(compilation, that.compilation)
+        && Objects.equals(station, that.station)
+        && Objects.equals(mediaType, that.mediaType)
+        && Objects.equals(supportedCommands, that.supportedCommands)
         && ((extras == null) == (that.extras == null));
   }

   @SuppressWarnings("deprecation") // Hashing deprecated fields.
   @Override
   public int hashCode() {
-    return Objects.hashCode(
+    return Objects.hash(
         title,
         artist,
         albumTitle,
@@ -22,7 +22,7 @@ import androidx.annotation.FloatRange;
 import androidx.annotation.Nullable;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
-import com.google.common.base.Objects;
+import java.util.Objects;

 /** A rating expressed as a percentage. */
 public final class PercentageRating extends Rating {
@@ -59,7 +59,7 @@ public final class PercentageRating extends Rating {

   @Override
   public int hashCode() {
-    return Objects.hashCode(percent);
+    return Objects.hash(percent);
   }

   @Override
@@ -36,6 +36,7 @@ import java.lang.annotation.Documented;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
+import java.util.Objects;

 /** Thrown when a non locally recoverable playback failure occurs. */
 public class PlaybackException extends Exception {
@@ -553,17 +554,17 @@ public class PlaybackException extends Exception {
     @Nullable Throwable thisCause = getCause();
     @Nullable Throwable thatCause = other.getCause();
     if (thisCause != null && thatCause != null) {
-      if (!Util.areEqual(thisCause.getMessage(), thatCause.getMessage())) {
+      if (!Objects.equals(thisCause.getMessage(), thatCause.getMessage())) {
         return false;
       }
-      if (!Util.areEqual(thisCause.getClass(), thatCause.getClass())) {
+      if (!Objects.equals(thisCause.getClass(), thatCause.getClass())) {
         return false;
       }
     } else if (thisCause != null || thatCause != null) {
       return false;
     }
     return errorCode == other.errorCode
-        && Util.areEqual(getMessage(), other.getMessage())
+        && Objects.equals(getMessage(), other.getMessage())
         && timestampMs == other.timestampMs;
   }
@@ -37,7 +37,6 @@ import androidx.media3.common.text.CueGroup;
 import androidx.media3.common.util.Size;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
-import com.google.common.base.Objects;
 import com.google.errorprone.annotations.CanIgnoreReturnValue;
 import java.lang.annotation.Documented;
 import java.lang.annotation.Retention;
@@ -45,6 +44,7 @@ import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;

 /**
  * A media player interface defining high-level functionality, such as the ability to play, pause,
@@ -352,13 +352,13 @@ public interface Player {
       }
       PositionInfo that = (PositionInfo) o;
       return equalsForBundling(that)
-          && Objects.equal(windowUid, that.windowUid)
-          && Objects.equal(periodUid, that.periodUid);
+          && Objects.equals(windowUid, that.windowUid)
+          && Objects.equals(periodUid, that.periodUid);
     }

     @Override
     public int hashCode() {
-      return Objects.hashCode(
+      return Objects.hash(
           windowUid,
           mediaItemIndex,
           mediaItem,
@@ -382,7 +382,7 @@ public interface Player {
           && contentPositionMs == other.contentPositionMs
           && adGroupIndex == other.adGroupIndex
           && adIndexInAdGroup == other.adIndexInAdGroup
-          && Objects.equal(mediaItem, other.mediaItem);
+          && Objects.equals(mediaItem, other.mediaItem);
     }

     @VisibleForTesting static final String FIELD_MEDIA_ITEM_INDEX = Util.intToStringMaxRadix(0);
@@ -2648,13 +2648,6 @@ public interface Player {
    */
   boolean hasPreviousMediaItem();

-  /**
-   * @deprecated Use {@link #seekToPreviousMediaItem()} instead.
-   */
-  @UnstableApi
-  @Deprecated
-  void seekToPreviousWindow();
-
   /**
    * Seeks to the default position of the previous {@link MediaItem}, which may depend on the
    * current repeat mode and whether shuffle mode is enabled. Does nothing if {@link
@@ -2703,20 +2696,6 @@ public interface Player {
    */
   void seekToPrevious();

-  /**
-   * @deprecated Use {@link #hasNextMediaItem()} instead.
-   */
-  @UnstableApi
-  @Deprecated
-  boolean hasNext();
-
-  /**
-   * @deprecated Use {@link #hasNextMediaItem()} instead.
-   */
-  @UnstableApi
-  @Deprecated
-  boolean hasNextWindow();
-
   /**
    * Returns whether a next {@link MediaItem} exists, which may depend on the current repeat mode
    * and whether shuffle mode is enabled.
@@ -2730,20 +2709,6 @@ public interface Player {
    */
   boolean hasNextMediaItem();

-  /**
-   * @deprecated Use {@link #seekToNextMediaItem()} instead.
-   */
-  @UnstableApi
-  @Deprecated
-  void next();
-
-  /**
-   * @deprecated Use {@link #seekToNextMediaItem()} instead.
-   */
-  @UnstableApi
-  @Deprecated
-  void seekToNextWindow();
-
   /**
    * Seeks to the default position of the next {@link MediaItem}, which may depend on the current
    * repeat mode and whether shuffle mode is enabled. Does nothing if {@link #hasNextMediaItem()} is
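
With the deprecated window-based methods now deleted from the `Player` interface, callers migrate one-for-one to the media-item equivalents. A minimal sketch:

```java
import androidx.media3.common.Player;

final class SeekMigrationSketch {
  static void skipForward(Player player) {
    if (player.hasNextMediaItem()) { // replaces hasNext() / hasNextWindow()
      player.seekToNextMediaItem(); // replaces next() / seekToNextWindow()
    }
  }
}
```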
@@ -1,80 +0,0 @@
-/*
- * Copyright 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package androidx.media3.common;
-
-import android.content.Context;
-import androidx.media3.common.util.UnstableApi;
-import java.util.List;
-import java.util.concurrent.Executor;
-
-/** A {@link VideoGraph} specific to previewing. */
-@UnstableApi
-public interface PreviewingVideoGraph extends VideoGraph {
-
-  /** A factory for creating a {@link PreviewingVideoGraph}. */
-  interface Factory {
-    /**
-     * Creates a new {@link PreviewingVideoGraph} instance.
-     *
-     * @param context A {@link Context}.
-     * @param outputColorInfo The {@link ColorInfo} for the output frames.
-     * @param debugViewProvider A {@link DebugViewProvider}.
-     * @param listener A {@link Listener}.
-     * @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
-     * @param videoCompositorSettings The {@link VideoCompositorSettings}.
-     * @param compositionEffects A list of {@linkplain Effect effects} to apply to the composition.
-     * @param initialTimestampOffsetUs The timestamp offset for the first frame, in microseconds.
-     * @return A new instance.
-     * @throws VideoFrameProcessingException If a problem occurs while creating the {@link
-     *     VideoFrameProcessor}.
-     */
-    PreviewingVideoGraph create(
-        Context context,
-        ColorInfo outputColorInfo,
-        DebugViewProvider debugViewProvider,
-        Listener listener,
-        Executor listenerExecutor,
-        VideoCompositorSettings videoCompositorSettings,
-        List<Effect> compositionEffects,
-        long initialTimestampOffsetUs)
-        throws VideoFrameProcessingException;
-
-    /**
-     * Returns whether the {@link VideoGraph} implementation supports {@linkplain #registerInput
-     * registering} multiple inputs.
-     */
-    boolean supportsMultipleInputs();
-  }
-
-  /**
-   * Renders the oldest unrendered output frame that has become {@linkplain
-   * Listener#onOutputFrameAvailableForRendering(long) available for rendering} at the given {@code
-   * renderTimeNs}.
-   *
-   * <p>This will either render the output frame to the {@linkplain #setOutputSurfaceInfo output
-   * surface}, or drop the frame, per {@code renderTimeNs}.
-   *
-   * <p>The {@code renderTimeNs} may be passed to {@link
-   * android.opengl.EGLExt#eglPresentationTimeANDROID} depending on the implementation.
-   *
-   * @param renderTimeNs The render time to use for the frame, in nanoseconds. The render time can
-   *     be before or after the current system time. Use {@link
-   *     VideoFrameProcessor#DROP_OUTPUT_FRAME} to drop the frame, or {@link
-   *     VideoFrameProcessor#RENDER_OUTPUT_FRAME_IMMEDIATELY} to render the frame immediately.
-   */
-  void renderOutputFrame(long renderTimeNs);
-}
@@ -1793,9 +1793,9 @@ public abstract class SimpleBasePlayer extends BasePlayer {
       return this.uid.equals(mediaItemData.uid)
           && this.tracks.equals(mediaItemData.tracks)
           && this.mediaItem.equals(mediaItemData.mediaItem)
-          && Util.areEqual(this.mediaMetadata, mediaItemData.mediaMetadata)
-          && Util.areEqual(this.manifest, mediaItemData.manifest)
-          && Util.areEqual(this.liveConfiguration, mediaItemData.liveConfiguration)
+          && Objects.equals(this.mediaMetadata, mediaItemData.mediaMetadata)
+          && Objects.equals(this.manifest, mediaItemData.manifest)
+          && Objects.equals(this.liveConfiguration, mediaItemData.liveConfiguration)
           && this.presentationStartTimeMs == mediaItemData.presentationStartTimeMs
           && this.windowStartTimeMs == mediaItemData.windowStartTimeMs
           && this.elapsedRealtimeEpochOffsetMs == mediaItemData.elapsedRealtimeEpochOffsetMs
@@ -3460,7 +3460,7 @@ public abstract class SimpleBasePlayer extends BasePlayer {
    *     index is in the range {@code fromIndex} < {@code toIndex} <= {@link
    *     #getMediaItemCount()}.
    * @param newIndex The new index of the first moved item. The index is in the range {@code 0}
-   *     <= {@code newIndex} < {@link #getMediaItemCount() - (toIndex - fromIndex)}.
+   *     <= {@code newIndex} <= {@link #getMediaItemCount() - (toIndex - fromIndex)}.
    * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State}
    *     changes caused by this call.
    */
@@ -3475,9 +3475,9 @@ public abstract class SimpleBasePlayer extends BasePlayer {
    * <p>Will only be called if {@link Player#COMMAND_CHANGE_MEDIA_ITEMS} is available.
    *
    * @param fromIndex The start index of the items to replace. The index is in the range 0 <=
-   *     {@code fromIndex} < {@link #getMediaItemCount()}.
+   *     {@code fromIndex} <= {@link #getMediaItemCount()}.
    * @param toIndex The index of the first item not to be replaced (exclusive). The index is in the
-   *     range {@code fromIndex} < {@code toIndex} <= {@link #getMediaItemCount()}.
+   *     range {@code fromIndex} <= {@code toIndex} <= {@link #getMediaItemCount()}.
    * @param mediaItems The media items to replace the specified range with.
    * @return A {@link ListenableFuture} indicating the completion of all immediate {@link State}
    *     changes caused by this call.
@@ -3486,6 +3486,9 @@ public abstract class SimpleBasePlayer extends BasePlayer {
   protected ListenableFuture<?> handleReplaceMediaItems(
       int fromIndex, int toIndex, List<MediaItem> mediaItems) {
     ListenableFuture<?> addFuture = handleAddMediaItems(toIndex, mediaItems);
+    if (fromIndex == toIndex) {
+      return addFuture;
+    }
     ListenableFuture<?> removeFuture = handleRemoveMediaItems(fromIndex, toIndex);
     return Util.transformFutureAsync(addFuture, unused -> removeFuture);
   }
@@ -3619,7 +3622,7 @@ public abstract class SimpleBasePlayer extends BasePlayer {
             Player.EVENT_MEDIA_ITEM_TRANSITION,
             listener -> listener.onMediaItemTransition(mediaItem, mediaItemTransitionReason));
       }
-      if (!Util.areEqual(previousState.playerError, newState.playerError)) {
+      if (!Objects.equals(previousState.playerError, newState.playerError)) {
         listeners.queueEvent(
             Player.EVENT_PLAYER_ERROR,
             listener -> listener.onPlayerErrorChanged(newState.playerError));
@@ -23,7 +23,7 @@ import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
-import com.google.common.base.Objects;
+import java.util.Objects;

 /** A rating expressed as a fractional number of stars. */
 public final class StarRating extends Rating {
@@ -84,7 +84,7 @@ public final class StarRating extends Rating {

   @Override
   public int hashCode() {
-    return Objects.hashCode(maxStars, starRating);
+    return Objects.hash(maxStars, starRating);
   }

   @Override
@@ -21,7 +21,7 @@ import android.os.Bundle;
 import androidx.annotation.Nullable;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
-import com.google.common.base.Objects;
+import java.util.Objects;

 /** A rating expressed as "thumbs up" or "thumbs down". */
 public final class ThumbRating extends Rating {
@@ -57,7 +57,7 @@ public final class ThumbRating extends Rating {

   @Override
   public int hashCode() {
-    return Objects.hashCode(rated, isThumbsUp);
+    return Objects.hash(rated, isThumbsUp);
   }

   @Override
@@ -36,6 +36,7 @@ import com.google.errorprone.annotations.CanIgnoreReturnValue;
 import com.google.errorprone.annotations.InlineMe;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;
 import org.checkerframework.checker.nullness.qual.NonNull;

 /**
@@ -371,10 +372,10 @@ public abstract class Timeline {
         return false;
       }
       Window that = (Window) obj;
-      return Util.areEqual(uid, that.uid)
-          && Util.areEqual(mediaItem, that.mediaItem)
-          && Util.areEqual(manifest, that.manifest)
-          && Util.areEqual(liveConfiguration, that.liveConfiguration)
+      return Objects.equals(uid, that.uid)
+          && Objects.equals(mediaItem, that.mediaItem)
+          && Objects.equals(manifest, that.manifest)
+          && Objects.equals(liveConfiguration, that.liveConfiguration)
           && presentationStartTimeMs == that.presentationStartTimeMs
           && windowStartTimeMs == that.windowStartTimeMs
           && elapsedRealtimeEpochOffsetMs == that.elapsedRealtimeEpochOffsetMs
@@ -871,13 +872,13 @@ public abstract class Timeline {
         return false;
       }
       Period that = (Period) obj;
-      return Util.areEqual(id, that.id)
-          && Util.areEqual(uid, that.uid)
+      return Objects.equals(id, that.id)
+          && Objects.equals(uid, that.uid)
           && windowIndex == that.windowIndex
           && durationUs == that.durationUs
           && positionInWindowUs == that.positionInWindowUs
           && isPlaceholder == that.isPlaceholder
-          && Util.areEqual(adPlaybackState, that.adPlaybackState);
+          && Objects.equals(adPlaybackState, that.adPlaybackState);
     }

     @Override
@@ -161,6 +161,11 @@ public final class TrackGroup {
     return id.equals(other.id) && Arrays.equals(formats, other.formats);
   }

+  @Override
+  public String toString() {
+    return id + ": " + Arrays.toString(formats);
+  }
+
   private static final String FIELD_FORMATS = Util.intToStringMaxRadix(0);
   private static final String FIELD_ID = Util.intToStringMaxRadix(1);
@@ -24,8 +24,10 @@ import android.opengl.EGLExt;
 import android.view.Surface;
 import androidx.annotation.IntDef;
 import androidx.annotation.Nullable;
+import androidx.media3.common.util.SystemClock;
 import androidx.media3.common.util.TimestampIterator;
 import androidx.media3.common.util.UnstableApi;
+import com.google.common.collect.ImmutableList;
 import java.lang.annotation.Documented;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
@@ -170,8 +172,12 @@ public interface VideoFrameProcessor {
      * rendering.
      *
      * @param presentationTimeUs The presentation time of the frame, in microseconds.
+     * @param isRedrawnFrame Whether the frame is a frame that is {@linkplain #redraw redrawn};
+     *     redrawn frames are rendered directly, thus {@link #renderOutputFrame} must not be called
+     *     on such frames.
      */
-    default void onOutputFrameAvailableForRendering(long presentationTimeUs) {}
+    default void onOutputFrameAvailableForRendering(
+        long presentationTimeUs, boolean isRedrawnFrame) {}

     /**
      * Called when an exception occurs during asynchronous video frame processing.
@@ -186,10 +192,10 @@ public interface VideoFrameProcessor {
   }

   /**
-   * Indicates the frame should be rendered immediately after {@link #renderOutputFrame(long)} is
-   * invoked.
+   * @deprecated Pass {@link SystemClock#nanoTime()} to {@link #renderOutputFrame} to render an
+   *     output frame immediately.
    */
-  long RENDER_OUTPUT_FRAME_IMMEDIATELY = -1;
+  @Deprecated long RENDER_OUTPUT_FRAME_IMMEDIATELY = -1;

   /** Indicates the frame should be dropped after {@link #renderOutputFrame(long)} is invoked. */
   long DROP_OUTPUT_FRAME = -2;
@@ -201,12 +207,18 @@ public interface VideoFrameProcessor {
   @SuppressWarnings("GoodTime-ApiWithNumericTimeUnit") // This is a named constant, not a time unit.
   long RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME = -3;

+  /** A marker for passing to {@link #registerInputStream} to signal a redraw. */
+  ImmutableList<Effect> REDRAW = ImmutableList.of(new Effect() {});
+
   /**
    * Provides an input {@link Bitmap} to the {@link VideoFrameProcessor}.
    *
    * <p>Can be called many times after {@link #registerInputStream registering the input stream} to
    * put multiple frames in the same input stream.
    *
    * <p>An implementation could {@link Bitmap#recycle} the passed in {@link Bitmap}, so it may not
    * be suitable for reuse.
    *
    * @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
    * @param timestampIterator A {@link TimestampIterator} generating the exact timestamps that the
    *     bitmap should be shown at.
@@ -269,13 +281,21 @@ public interface VideoFrameProcessor {
    */
   Surface getInputSurface();

+  /**
+   * Updates an {@linkplain Listener#onOutputFrameAvailableForRendering available frame} with the
+   * modified effects.
+   *
+   * <p>This method can be called from any thread.
+   */
+  void redraw();
+
   /**
    * Informs the {@code VideoFrameProcessor} that a new input stream will be queued with the list of
    * {@link Effect Effects} to apply to the new input stream.
    *
    * <p>After registering the first input stream, this method must only be called after the last
    * frame of the already-registered input stream has been {@linkplain #registerInputFrame
-   * registered}, last bitmap {@link #queueInputBitmap queued} or last texture id {@linkplain
+   * registered}, last bitmap {@linkplain #queueInputBitmap queued} or last texture id {@linkplain
    * #queueInputTexture queued}.
    *
    * <p>This method blocks the calling thread until the previous calls to this method finish, that
@@ -338,25 +358,26 @@ public interface VideoFrameProcessor {

   /**
    * Renders the oldest unrendered output frame that has become {@linkplain
-   * Listener#onOutputFrameAvailableForRendering(long) available for rendering} at the given {@code
-   * renderTimeNs}.
+   * Listener#onOutputFrameAvailableForRendering(long, boolean) available for rendering} at the
+   * given {@code renderTimeNs}.
    *
    * <p>This will either render the output frame to the {@linkplain #setOutputSurfaceInfo output
    * surface}, or drop the frame, per {@code renderTimeNs}.
    *
    * <p>This method must only be called if {@code renderFramesAutomatically} was set to {@code
    * false} using the {@link Factory} and should be called exactly once for each frame that becomes
-   * {@linkplain Listener#onOutputFrameAvailableForRendering(long) available for rendering}.
+   * {@linkplain Listener#onOutputFrameAvailableForRendering(long, boolean) available for
+   * rendering}.
    *
    * <p>The {@code renderTimeNs} may be passed to {@link EGLExt#eglPresentationTimeANDROID}
    * depending on the implementation.
    *
    * @param renderTimeNs The render time to use for the frame, in nanoseconds. The render time can
    *     be before or after the current system time. Use {@link #DROP_OUTPUT_FRAME} to drop the
-   *     frame, or {@link #RENDER_OUTPUT_FRAME_IMMEDIATELY} to render the frame immediately, or
-   *     {@link #RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME} to render the frame to the {@linkplain
-   *     #setOutputSurfaceInfo output surface} with the presentation timestamp seen in {@link
-   *     Listener#onOutputFrameAvailableForRendering(long)}.
+   *     frame or {@link #RENDER_OUTPUT_FRAME_WITH_PRESENTATION_TIME} to render the frame to the
+   *     {@linkplain #setOutputSurfaceInfo output surface} with the presentation timestamp seen in
+   *     {@link Listener#onOutputFrameAvailableForRendering(long, boolean)}. If the frame should be
+   *     rendered immediately, pass in {@link SystemClock#nanoTime()}.
    */
   void renderOutputFrame(long renderTimeNs);

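A sketch of how a caller might honor the new isRedrawnFrame contract. This is illustrative only; `processor` is assumed to be an existing VideoFrameProcessor created with renderFramesAutomatically set to false:

@Override
public void onOutputFrameAvailableForRendering(
    long presentationTimeUs, boolean isRedrawnFrame) {
  if (isRedrawnFrame) {
    // Redrawn frames are rendered directly by the processor;
    // renderOutputFrame must not be called for them.
    return;
  }
  // Render as soon as possible: per the deprecation note above, pass the
  // current time instead of RENDER_OUTPUT_FRAME_IMMEDIATELY.
  processor.renderOutputFrame(System.nanoTime());
}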
@@ -16,16 +16,61 @@
  */
 package androidx.media3.common;

+import android.content.Context;
+import android.graphics.Bitmap;
+import android.view.Surface;
 import androidx.annotation.IntRange;
 import androidx.annotation.Nullable;
+import androidx.media3.common.VideoFrameProcessor.InputType;
+import androidx.media3.common.util.TimestampIterator;
 import androidx.media3.common.util.UnstableApi;
+import java.util.List;
+import java.util.concurrent.Executor;

 /** Represents a graph for processing raw video frames. */
 @UnstableApi
 public interface VideoGraph {

+  /** A factory for {@link VideoGraph} instances. */
+  interface Factory {
+    /**
+     * Creates a new {@link VideoGraph} instance.
+     *
+     * @param context A {@link Context}.
+     * @param outputColorInfo The {@link ColorInfo} for the output frames.
+     * @param debugViewProvider A {@link DebugViewProvider}.
+     * @param listener A {@link Listener}.
+     * @param listenerExecutor The {@link Executor} on which the {@code listener} is invoked.
+     * @param videoCompositorSettings The {@link VideoCompositorSettings} to apply to the
+     *     composition.
+     * @param compositionEffects A list of {@linkplain Effect effects} to apply to the composition.
+     * @param initialTimestampOffsetUs The timestamp offset for the first frame, in microseconds.
+     * @param renderFramesAutomatically If {@code true}, the instance will render output frames to
+     *     the {@linkplain VideoGraph#setOutputSurfaceInfo(SurfaceInfo) output surface}
+     *     automatically as the instance is done processing them. If {@code false}, the instance
+     *     will block until {@code VideoGraph#renderOutputFrameWithMediaPresentationTime()} is
+     *     called to render the frame.
+     * @return A new instance.
+     */
+    VideoGraph create(
+        Context context,
+        ColorInfo outputColorInfo,
+        DebugViewProvider debugViewProvider,
+        Listener listener,
+        Executor listenerExecutor,
+        VideoCompositorSettings videoCompositorSettings,
+        List<Effect> compositionEffects,
+        long initialTimestampOffsetUs,
+        boolean renderFramesAutomatically);
+
+    /**
+     * Returns whether the {@linkplain #create created} {@link VideoGraph} supports multiple video
+     * {@linkplain VideoGraph#registerInputStream inputs}.
+     */
+    boolean supportsMultipleInputs();
+  }
+
   /** Listener for video frame processing events. */
   @UnstableApi
   interface Listener {
     /**
      * Called when the output size changes.
@@ -48,8 +93,12 @@ public interface VideoGraph {
      * for rendering.
      *
      * @param framePresentationTimeUs The presentation time of the frame, in microseconds.
+     * @param isRedrawnFrame Whether the frame is a frame that is {@linkplain #redraw redrawn};
+     *     redrawn frames are rendered directly, thus {@link #renderOutputFrame} must not be called
+     *     on such frames.
      */
-    default void onOutputFrameAvailableForRendering(long framePresentationTimeUs) {}
+    default void onOutputFrameAvailableForRendering(
+        long framePresentationTimeUs, boolean isRedrawnFrame) {}

     /**
      * Called after the {@link VideoGraph} has rendered its final output frame.
@@ -79,11 +128,8 @@ public interface VideoGraph {
   /**
    * Registers a new input to the {@code VideoGraph}.
    *
-   * <p>A underlying processing {@link VideoFrameProcessor} is created every time this method is
-   * called.
-   *
-   * <p>All inputs must be registered before rendering frames to the underlying {@link
-   * #getProcessor(int) VideoFrameProcessor}.
+   * <p>All inputs must be registered before rendering frames by calling {@link
+   * #registerInputFrame}, {@link #queueInputBitmap} or {@link #queueInputTexture}.
    *
    * <p>If the method throws, the caller must call {@link #release}.
    *
@@ -92,13 +138,6 @@ public interface VideoGraph {
    */
   void registerInput(@IntRange(from = 0) int inputIndex) throws VideoFrameProcessingException;

-  /**
-   * Returns the {@link VideoFrameProcessor} that handles the processing for an input registered via
-   * {@link #registerInput(int)}. If the {@code inputIndex} is not {@linkplain #registerInput(int)
-   * registered} before, this method will throw an {@link IllegalStateException}.
-   */
-  VideoFrameProcessor getProcessor(int inputIndex);
-
   /**
    * Sets the output surface and supporting information.
    *
@@ -116,11 +155,111 @@ public interface VideoGraph {
    */
   void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo);

+  /**
+   * Sets a listener that's called when the {@linkplain #getInputSurface input surface} is ready to
+   * use at {@code inputIndex}.
+   */
+  void setOnInputSurfaceReadyListener(int inputIndex, Runnable listener);
+
+  /** Returns the input {@link Surface} at {@code inputIndex}. */
+  Surface getInputSurface(int inputIndex);
+
+  /** Sets the {@link OnInputFrameProcessedListener} at {@code inputIndex}. */
+  void setOnInputFrameProcessedListener(int inputIndex, OnInputFrameProcessedListener listener);
+
+  /**
+   * Informs the graph that a new input stream will be queued to the graph input corresponding to
+   * {@code inputIndex}.
+   *
+   * <p>After registering the first input stream, this method must only be called for the same index
+   * after the last frame of the already-registered input stream has been {@linkplain
+   * #registerInputFrame registered}, last bitmap {@linkplain #queueInputBitmap queued} or last
+   * texture id {@linkplain #queueInputTexture queued}.
+   *
+   * <p>This method blocks the calling thread until the previous input stream corresponding to the
+   * same {@code inputIndex} has been fully registered internally.
+   *
+   * @param inputIndex The index of the input for which a new input stream should be registered.
+   *     This index must start from 0.
+   * @param inputType The {@link InputType} of the new input stream.
+   * @param format The {@link Format} of the new input stream. The {@link Format#colorInfo}, the
+   *     {@link Format#width}, the {@link Format#height} and the {@link
+   *     Format#pixelWidthHeightRatio} must be set.
+   * @param effects The list of {@link Effect effects} to apply to the new input stream.
+   * @param offsetToAddUs The offset that must be added to the frame presentation timestamps, in
+   *     microseconds. This offset is not part of the input timestamps. It is added to the frame
+   *     timestamps before processing, and is retained in the output timestamps.
+   */
+  void registerInputStream(
+      int inputIndex,
+      @InputType int inputType,
+      Format format,
+      List<Effect> effects,
+      long offsetToAddUs);
+
+  /**
+   * Returns the number of pending input frames at {@code inputIndex} that have not been processed
+   * yet.
+   */
+  int getPendingInputFrameCount(int inputIndex);
+
+  /**
+   * Registers a new input frame at {@code inputIndex}.
+   *
+   * @see VideoFrameProcessor#registerInputFrame()
+   */
+  boolean registerInputFrame(int inputIndex);
+
+  /**
+   * Queues the input {@link Bitmap} at {@code inputIndex}.
+   *
+   * @see VideoFrameProcessor#queueInputBitmap(Bitmap, TimestampIterator)
+   */
+  boolean queueInputBitmap(int inputIndex, Bitmap inputBitmap, TimestampIterator timestampIterator);
+
+  /**
+   * Queues the input texture at {@code inputIndex}.
+   *
+   * @see VideoFrameProcessor#queueInputTexture(int, long)
+   */
+  boolean queueInputTexture(int inputIndex, int textureId, long presentationTimeUs);
+
+  /**
+   * Renders the output frame from the {@code VideoGraph}.
+   *
+   * <p>This method must be called only for frames that have become {@linkplain
+   * Listener#onOutputFrameAvailableForRendering available}; each call renders the earliest
+   * available frame that has not yet been rendered.
+   *
+   * @see VideoFrameProcessor#renderOutputFrame(long)
+   */
+  void renderOutputFrame(long renderTimeNs);
+
+  /**
+   * Updates an {@linkplain Listener#onOutputFrameAvailableForRendering available frame} with the
+   * modified effects.
+   */
+  void redraw();
+
+  /**
+   * Returns whether the {@code VideoGraph} has produced a frame with zero presentation timestamp.
+   */
+  boolean hasProducedFrameWithTimestampZero();
+
+  /**
+   * Flushes the {@linkplain #registerInput inputs} of the {@code VideoGraph}.
+   *
+   * @see VideoFrameProcessor#flush()
+   */
+  void flush();
+
+  /**
+   * Informs that no further inputs should be accepted at {@code inputIndex}.
+   *
+   * @see VideoFrameProcessor#signalEndOfInput()
+   */
+  void signalEndOfInput(int inputIndex);
+
   /**
    * Releases the associated resources.
    *
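A sketch of the call order implied by the consolidated interface. The variables `graph`, `format`, `bitmap` and `timestampIterator` are assumed to exist already, and `format` must have colorInfo, width, height and pixelWidthHeightRatio set:

try {
  graph.registerInput(/* inputIndex= */ 0); // Must precede any queueing.
} catch (VideoFrameProcessingException e) {
  graph.release(); // Per the Javadoc: if registerInput throws, call release().
  throw new IllegalStateException(e);
}
graph.registerInputStream(
    /* inputIndex= */ 0,
    VideoFrameProcessor.INPUT_TYPE_BITMAP,
    format,
    /* effects= */ ImmutableList.of(),
    /* offsetToAddUs= */ 0);
graph.queueInputBitmap(/* inputIndex= */ 0, bitmap, timestampIterator);
graph.signalEndOfInput(/* inputIndex= */ 0);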
@@ -22,7 +22,6 @@ import android.media.AudioFocusRequest;
 import android.media.AudioManager;
 import android.os.Handler;
 import android.os.Looper;
-import android.os.Message;
 import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
 import androidx.media3.common.AudioAttributes;
@@ -56,7 +55,7 @@ public final class AudioFocusRequestCompat {
     this.audioAttributes = audioFocusRequestCompat;
     this.pauseOnDuck = pauseOnDuck;

-    if (Util.SDK_INT < 26 && focusChangeHandler.getLooper() != Looper.getMainLooper()) {
+    if (Util.SDK_INT < 26) {
       this.onAudioFocusChangeListener =
           new OnAudioFocusChangeListenerHandlerCompat(
               onAudioFocusChangeListener, focusChangeHandler);
@@ -326,9 +325,7 @@ public final class AudioFocusRequestCompat {
    * a specific thread prior to API 26.
    */
   private static class OnAudioFocusChangeListenerHandlerCompat
-      implements Handler.Callback, AudioManager.OnAudioFocusChangeListener {
-
-    private static final int FOCUS_CHANGE = 0x002a74b2;
+      implements AudioManager.OnAudioFocusChangeListener {

     private final Handler handler;
     private final AudioManager.OnAudioFocusChangeListener listener;
@@ -336,21 +333,12 @@ public final class AudioFocusRequestCompat {
     /* package */ OnAudioFocusChangeListenerHandlerCompat(
         AudioManager.OnAudioFocusChangeListener listener, Handler handler) {
       this.listener = listener;
-      this.handler = Util.createHandler(handler.getLooper(), /* callback= */ this);
+      this.handler = Util.createHandler(handler.getLooper(), /* callback= */ null);
     }

     @Override
     public void onAudioFocusChange(int focusChange) {
-      handler.sendMessage(Message.obtain(handler, FOCUS_CHANGE, focusChange, 0));
-    }
-
-    @Override
-    public boolean handleMessage(Message message) {
-      if (message.what == FOCUS_CHANGE) {
-        listener.onAudioFocusChange(message.arg1);
-        return true;
-      }
-      return false;
+      Util.postOrRun(handler, () -> listener.onAudioFocusChange(focusChange));
     }
   }
 }
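The rewrite replaces the Handler.Callback/Message round trip with Util.postOrRun. A simplified sketch of that pattern (the real Util.postOrRun also has additional handling this sketch omits):

import android.os.Handler;
import android.os.Looper;

final class PostOrRunDemo {
  // Run inline when already on the handler's thread, otherwise post.
  static void postOrRun(Handler handler, Runnable runnable) {
    if (handler.getLooper() == Looper.myLooper()) {
      runnable.run(); // No queue hop, and no Message constants needed.
    } else {
      handler.post(runnable);
    }
  }
}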
@@ -15,12 +15,18 @@
  */
 package androidx.media3.common.audio;

+import static androidx.media3.common.util.Assertions.checkNotNull;
 import static java.lang.annotation.ElementType.TYPE_USE;

+import android.content.Context;
 import android.media.AudioManager;
+import android.os.Looper;
 import androidx.annotation.IntDef;
 import androidx.annotation.IntRange;
+import androidx.annotation.Nullable;
 import androidx.media3.common.C;
+import androidx.media3.common.util.BackgroundExecutor;
+import androidx.media3.common.util.ConditionVariable;
 import androidx.media3.common.util.Log;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
@@ -28,11 +34,14 @@ import java.lang.annotation.Documented;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
+import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

 /** Compatibility layer for {@link AudioManager} with fallbacks for older Android versions. */
 @UnstableApi
 public final class AudioManagerCompat {

+  private static final String TAG = "AudioManagerCompat";
+
   /**
    * Audio focus gain types. One of {@link #AUDIOFOCUS_NONE}, {@link #AUDIOFOCUS_GAIN}, {@link
    * #AUDIOFOCUS_GAIN_TRANSIENT}, {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} or {@link
@@ -83,6 +92,55 @@ public final class AudioManagerCompat {
   public static final int AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE =
       AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE;

+  @SuppressWarnings("NonFinalStaticField") // Lazily initialized under class lock
+  @Nullable
+  private static AudioManager audioManager;
+
+  @SuppressWarnings("NonFinalStaticField") // Lazily initialized under class lock
+  private static @MonotonicNonNull Context applicationContext;
+
+  /**
+   * Returns the {@link AudioManager}.
+   *
+   * <p>This method avoids potential threading issues where AudioManager keeps access to the thread
+   * it was created on until after this thread is stopped.
+   *
+   * <p>It is recommended to use this method from a background thread.
+   *
+   * @param context A {@link Context}.
+   * @return The {@link AudioManager}.
+   */
+  public static synchronized AudioManager getAudioManager(Context context) {
+    Context applicationContext = context.getApplicationContext();
+    if (AudioManagerCompat.applicationContext != applicationContext) {
+      // Reset cached instance if the application context changed. This should only happen in tests.
+      audioManager = null;
+    }
+    if (audioManager != null) {
+      return audioManager;
+    }
+    @Nullable Looper myLooper = Looper.myLooper();
+    if (myLooper == null || myLooper == Looper.getMainLooper()) {
+      // The AudioManager will assume the main looper as default callback anyway, so create the
+      // instance here without using BackgroundExecutor.
+      audioManager = (AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE);
+      return checkNotNull(audioManager);
+    }
+    // Create the audio manager on the BackgroundExecutor to avoid running the potentially blocking
+    // command on the main thread but still use a thread that is guaranteed to exist for the
+    // lifetime of the app.
+    ConditionVariable audioManagerSetCondition = new ConditionVariable();
+    BackgroundExecutor.get()
+        .execute(
+            () -> {
+              audioManager =
+                  (AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE);
+              audioManagerSetCondition.open();
+            });
+    audioManagerSetCondition.blockUninterruptible();
+    return checkNotNull(audioManager);
+  }
+
   /**
    * Requests audio focus. See the {@link AudioFocusRequestCompat} for information about the options
    * available to configure your request, and notification of focus gain and loss.
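Usage sketch for the new accessor, assuming a `context` variable; repeated calls are served from the cached instance:

// Fetch the AudioManager off the main thread, as the Javadoc recommends.
BackgroundExecutor.get()
    .execute(
        () -> {
          AudioManager audioManager = AudioManagerCompat.getAudioManager(context);
          // Use audioManager here; later calls return the cached instance.
        });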
@@ -20,9 +20,9 @@ import androidx.media3.common.C;
 import androidx.media3.common.Format;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
-import com.google.common.base.Objects;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.util.Objects;

 /**
  * Interface for audio processors, which take audio data as input and transform it, potentially
@@ -107,7 +107,7 @@ public interface AudioProcessor {

   @Override
   public int hashCode() {
-    return Objects.hashCode(sampleRate, channelCount, encoding);
+    return Objects.hash(sampleRate, channelCount, encoding);
   }
 }
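The hashCode change is value-preserving: Guava's Objects.hashCode(Object...) and java.util.Objects.hash(Object...) both delegate to Arrays.hashCode of the varargs array. A minimal check:

import java.util.Arrays;
import java.util.Objects;

final class HashMigrationDemo {
  public static void main(String[] args) {
    int viaHash = Objects.hash(44100, 2, 2);
    int viaArrays = Arrays.hashCode(new Object[] {44100, 2, 2});
    System.out.println(viaHash == viaArrays); // true
  }
}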
@@ -17,6 +17,7 @@ package androidx.media3.common.audio;

 import static androidx.media3.common.util.Assertions.checkArgument;

+import androidx.annotation.IntRange;
 import androidx.media3.common.util.UnstableApi;
 import com.google.errorprone.annotations.CanIgnoreReturnValue;

@@ -52,21 +53,41 @@ public final class ChannelMixingMatrix {
   private final boolean isIdentity;

   /**
-   * Creates a basic channel mixing matrix that converts from {@code inputChannelCount} channels to
-   * {@code outputChannelCount} channels.
+   * Returns a default constant gain channel mixing matrix that mixes {@code inputChannelCount}
+   * channels into {@code outputChannelCount} channels.
    *
-   * <p>If the input and output channel counts match then a simple identity matrix will be returned.
-   * Otherwise, default matrix coefficients will be used to best match channel locations and overall
-   * power level.
+   * <p>This method returns an identity matrix if {@code inputChannelCount} and {@code
+   * outputChannelCount} are equal.
    *
    * @param inputChannelCount Number of input channels.
    * @param outputChannelCount Number of output channels.
-   * @return New channel mixing matrix.
-   * @throws UnsupportedOperationException If no default matrix coefficients are implemented for the
-   *     given input and output channel counts.
+   * @throws UnsupportedOperationException If no default coefficients are available for the given
+   *     input and output channel counts.
+   * @deprecated Use {@link #createForConstantGain} instead.
    */
-  // TODO(b/300467493): Modify create() to use constant power defaults and migrate all users.
-  public static ChannelMixingMatrix create(int inputChannelCount, int outputChannelCount) {
+  // TODO(b/399861060): Remove in Media3 1.8.
+  @Deprecated
+  public static ChannelMixingMatrix create(
+      @IntRange(from = 1, to = 2) int inputChannelCount,
+      @IntRange(from = 1, to = 2) int outputChannelCount) {
+    return createForConstantGain(inputChannelCount, outputChannelCount);
+  }
+
+  /**
+   * Returns a default constant gain channel mixing matrix that mixes {@code inputChannelCount}
+   * channels into {@code outputChannelCount} channels.
+   *
+   * <p>This method returns an identity matrix if {@code inputChannelCount} and {@code
+   * outputChannelCount} are equal.
+   *
+   * @param inputChannelCount Number of input channels.
+   * @param outputChannelCount Number of output channels.
+   * @throws UnsupportedOperationException If no default coefficients are available for the given
+   *     input and output channel counts.
+   */
+  public static ChannelMixingMatrix createForConstantGain(
+      @IntRange(from = 1, to = 2) int inputChannelCount,
+      @IntRange(from = 1, to = 2) int outputChannelCount) {
     return new ChannelMixingMatrix(
         inputChannelCount,
         outputChannelCount,
@@ -74,19 +95,31 @@ public final class ChannelMixingMatrix {
   }

   /**
-   * Returns default constant power matrix for mixing {@code inputChannelCount} channels into {@code
-   * outputChannelCount} channels.
+   * Returns a default constant power channel mixing matrix that mixes {@code inputChannelCount}
+   * channels into {@code outputChannelCount} channels.
    *
-   * <p>If the input and output channel counts match then a simple identity matrix will be returned.
+   * <p>This method returns an identity matrix if {@code inputChannelCount} and {@code
+   * outputChannelCount} are equal.
+   *
+   * <p>Channel counts map to the following layouts:
+   *
+   * <ol>
+   *   <li>[MONO]
+   *   <li>[FRONT_LEFT, FRONT_RIGHT]
+   *   <li>[FRONT_LEFT, FRONT_RIGHT, FRONT_CENTER]
+   *   <li>[FRONT_LEFT, FRONT_RIGHT, BACK_LEFT, BACK_RIGHT]
+   *   <li>[FRONT_LEFT, FRONT_RIGHT, FRONT_CENTER, BACK_LEFT, BACK_RIGHT]
+   *   <li>[FRONT_LEFT, FRONT_RIGHT, FRONT_CENTER, LOW_FREQUENCY, BACK_LEFT, BACK_RIGHT]
+   * </ol>
    *
    * @param inputChannelCount Number of input channels.
    * @param outputChannelCount Number of output channels.
-   * @return New channel mixing matrix.
-   * @throws UnsupportedOperationException If no default matrix coefficients are implemented for the
-   *     given input and output channel counts.
+   * @throws UnsupportedOperationException If no default coefficients are available for the given
+   *     input and output channel counts.
    */
   public static ChannelMixingMatrix createForConstantPower(
-      int inputChannelCount, int outputChannelCount) {
+      @IntRange(from = 1, to = 6) int inputChannelCount,
+      @IntRange(from = 1, to = 2) int outputChannelCount) {
     return new ChannelMixingMatrix(
         inputChannelCount,
         outputChannelCount,
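Usage sketch for the renamed factories (the channel counts are illustrative):

// The deprecated create() now forwards to createForConstantGain.
ChannelMixingMatrix stereoToMono =
    ChannelMixingMatrix.createForConstantGain(
        /* inputChannelCount= */ 2, /* outputChannelCount= */ 1);

// Constant-power defaults accept up to six input channels, e.g. 5.1 layouts.
ChannelMixingMatrix surroundToStereo =
    ChannelMixingMatrix.createForConstantPower(
        /* inputChannelCount= */ 6, /* outputChannelCount= */ 2);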
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.common.audio;
+
+import static androidx.media3.common.util.Assertions.checkArgument;
+import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Assertions.checkState;
+import static androidx.media3.common.util.Util.durationUsToSampleCount;
+import static androidx.media3.common.util.Util.sampleCountToDurationUs;
+
+import android.util.Pair;
+import androidx.annotation.IntRange;
+import androidx.media3.common.C;
+import androidx.media3.common.audio.GainProcessor.GainProvider;
+import androidx.media3.common.util.UnstableApi;
+import com.google.common.base.Function;
+import com.google.common.collect.Range;
+import com.google.common.collect.RangeMap;
+import com.google.common.collect.TreeRangeMap;
+import com.google.errorprone.annotations.CanIgnoreReturnValue;
+import java.util.Map.Entry;
+
+/**
+ * Provides gain automation information to be applied on an audio stream.
+ *
+ * <p>The class allows combining multiple {@linkplain FadeProvider fade shapes} into one single
+ * automation line, with common fade shapes already implemented (e.g. {@link #FADE_IN_LINEAR}).
+ *
+ * @see GainProcessor
+ */
+@UnstableApi
+public final class DefaultGainProvider implements GainProvider {
+
+  /** A builder for {@link DefaultGainProvider} instances. */
+  public static final class Builder {
+    private final TreeRangeMap<Long, Function<Pair<Long, Integer>, Float>> gainMap;
+    private final float defaultGain;
+
+    /**
+     * Returns a {@link DefaultGainProvider} builder.
+     *
+     * @param defaultGain Default gain value.
+     */
+    public Builder(float defaultGain) {
+      gainMap = TreeRangeMap.create();
+      // Add default value for all possible positions.
+      this.defaultGain = defaultGain;
+      gainMap.put(Range.all(), (a) -> GAIN_UNSET);
+    }
+
+    /**
+     * Adds a {@code shape} to be applied between [{@code positionUs}; {@code positionUs} + {@code
+     * durationUs}).
+     *
+     * <p>This fade overwrites the shape of any previously added fade if they overlap.
+     */
+    @CanIgnoreReturnValue
+    public Builder addFadeAt(
+        @IntRange(from = 0) long positionUs,
+        @IntRange(from = 1) long durationUs,
+        FadeProvider shape) {
+      checkArgument(positionUs >= 0);
+      checkArgument(durationUs > 1);
+      gainMap.put(
+          Range.closedOpen(positionUs, positionUs + durationUs),
+          (positionSampleRatePair) -> {
+            int sampleRate = positionSampleRatePair.second;
+            long relativeSamplePosition =
+                positionSampleRatePair.first - durationUsToSampleCount(positionUs, sampleRate);
+            return shape.getGainFactorAt(
+                relativeSamplePosition, durationUsToSampleCount(durationUs, sampleRate));
+          });
+      return this;
+    }
+
+    /** Returns a new {@link DefaultGainProvider} instance. */
+    public DefaultGainProvider build() {
+      return new DefaultGainProvider(gainMap, defaultGain);
+    }
+  }
+
+  /** Represents a time unit-agnostic fade shape to be applied over an automation. */
+  public interface FadeProvider {
+
+    /**
+     * Returns the gain factor within [0f; 1f] to apply to an audio sample for a specific fade
+     * shape.
+     *
+     * <p>Position and duration are unit agnostic and work as a numerator/denominator pair.
+     *
+     * <p>You can implement a basic linear fade as follows:
+     *
+     * <pre>{@code
+     * @Override
+     * public float getGainFactorAt(long index, long duration) {
+     *   return (float) index / duration;
+     * }
+     * }</pre>
+     *
+     * @param index Position (numerator) between [0; {@code duration}].
+     * @param duration Duration (denominator).
+     */
+    float getGainFactorAt(@IntRange(from = 0) long index, @IntRange(from = 1) long duration);
+  }
+
+  /**
+   * Equal gain fade in.
+   *
+   * <p>Ramps linearly from 0 to 1.
+   *
+   * <p>Summing this with {@link #FADE_OUT_LINEAR} returns a constant gain of 1 for all valid
+   * indexes.
+   */
+  public static final FadeProvider FADE_IN_LINEAR = (index, duration) -> (float) index / duration;
+
+  /**
+   * Equal gain fade out.
+   *
+   * <p>Ramps linearly from 1 to 0.
+   *
+   * <p>Summing this with {@link #FADE_IN_LINEAR} returns a constant gain of 1 for all valid
+   * indexes.
+   */
+  public static final FadeProvider FADE_OUT_LINEAR =
+      (index, duration) -> (float) (duration - index) / duration;
+
+  /**
+   * Equal power fade in.
+   *
+   * <p>Ramps from 0 to 1 using an equal power curve.
+   *
+   * <p>Summing this with {@link #FADE_OUT_EQUAL_POWER} returns a constant power of 1 for all valid
+   * indexes.
+   */
+  public static final FadeProvider FADE_IN_EQUAL_POWER =
+      (index, duration) -> (float) Math.sin((Math.PI / 2.0) * index / duration);
+
+  /**
+   * Equal power fade out.
+   *
+   * <p>Ramps from 1 to 0 using an equal power curve.
+   *
+   * <p>Summing this with {@link #FADE_IN_EQUAL_POWER} returns a constant power of 1 for all valid
+   * indexes.
+   */
+  public static final FadeProvider FADE_OUT_EQUAL_POWER =
+      (index, duration) -> (float) Math.cos((Math.PI / 2.0) * index / duration);
+
+  private static final float GAIN_UNSET = C.RATE_UNSET;
+
+  /**
+   * {@link RangeMap} for representing a sequence of fades applied at specific time ranges over a
+   * default gain value.
+   *
+   * <p>Keys correspond to the position range in microseconds. Entry values correspond to a generic
+   * {@link Function} that returns a gain value based on a sample position and sample rate.
+   */
+  // Use TreeRangeMap instead of ImmutableRangeMap to allow overlapping ranges.
+  private final TreeRangeMap<Long, Function<Pair<Long, Integer>, Float>> gainMap;
+
+  private final float defaultGain;
+
+  private DefaultGainProvider(
+      TreeRangeMap<Long, Function<Pair<Long, Integer>, Float>> gainMap, float defaultGain) {
+    this.gainMap = TreeRangeMap.create();
+    this.gainMap.putAll(gainMap);
+    this.defaultGain = defaultGain;
+  }
+
+  @Override
+  public float getGainFactorAtSamplePosition(
+      @IntRange(from = 0) long samplePosition, @IntRange(from = 1) int sampleRate) {
+    checkState(sampleRate > 0);
+    checkArgument(samplePosition >= 0);
+
+    // gainMap has a default value set for all possible values, so it should never return null.
+    float gain =
+        checkNotNull(gainMap.get(sampleCountToDurationUs(samplePosition, sampleRate)))
+            .apply(Pair.create(samplePosition, sampleRate));
+    if (gain == GAIN_UNSET) {
+      return defaultGain;
+    }
+    return gain;
+  }
+
+  @Override
+  // TODO (b/400418589): Add support for non-default value unity ranges.
+  public long isUnityUntil(
+      @IntRange(from = 0) long samplePosition, @IntRange(from = 1) int sampleRate) {
+    checkState(sampleRate > 0);
+    checkArgument(samplePosition >= 0);
+
+    long positionUs = sampleCountToDurationUs(samplePosition, sampleRate);
+    Entry<Range<Long>, Function<Pair<Long, Integer>, Float>> entry =
+        checkNotNull(gainMap.getEntry(positionUs));
+
+    if (defaultGain != 1f
+        || entry.getValue().apply(Pair.create(samplePosition, sampleRate)) != GAIN_UNSET) {
+      return C.TIME_UNSET;
+    }
+
+    if (!entry.getKey().hasUpperBound()) {
+      return C.TIME_END_OF_SOURCE;
+    }
+
+    return durationUsToSampleCount(entry.getKey().upperEndpoint(), sampleRate);
+  }
+}
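As an illustration of the new builder API (the timing values are made up): a provider with unity default gain, an equal-power fade-in over the first 500 ms and a linear fade-out over the last 500 ms of a 10 s stream could be built like this:

DefaultGainProvider gainProvider =
    new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
        .addFadeAt(
            /* positionUs= */ 0,
            /* durationUs= */ 500_000,
            DefaultGainProvider.FADE_IN_EQUAL_POWER)
        .addFadeAt(
            /* positionUs= */ 9_500_000,
            /* durationUs= */ 500_000,
            DefaultGainProvider.FADE_OUT_LINEAR)
        .build();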
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.common.audio;
+
+import static androidx.media3.common.util.Assertions.checkArgument;
+import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Assertions.checkState;
+import static java.lang.Math.min;
+
+import androidx.annotation.IntRange;
+import androidx.media3.common.C;
+import androidx.media3.common.util.UnstableApi;
+import com.google.errorprone.annotations.CanIgnoreReturnValue;
+import java.nio.ByteBuffer;
+import java.util.Objects;
+
+/** Applies {@linkplain GainProvider gain automation} over an audio stream. */
+@UnstableApi
+public final class GainProcessor extends BaseAudioProcessor {
+
+  /** Interface that provides sample-level gain automation to be applied on an audio stream. */
+  public interface GainProvider {
+    /**
+     * Returns a gain factor between [0f; 1f] to apply at the given sample position relative to
+     * {@code sampleRate}.
+     *
+     * <p>Returned values must not change for the same pair of parameter values within the lifetime
+     * of the instance.
+     */
+    float getGainFactorAtSamplePosition(
+        @IntRange(from = 0) long samplePosition, @IntRange(from = 1) int sampleRate);
+
+    /**
+     * Returns the exclusive upper limit of the range starting at {@code samplePosition} where the
+     * gain value is 1f (unity), or {@link C#TIME_UNSET} if {@code samplePosition} does not
+     * correspond to a gain of 1f.
+     *
+     * <p>If the range continues until the end of the stream, this method returns {@link
+     * C#TIME_END_OF_SOURCE}.
+     *
+     * <p>Returned values must not change for the same pair of parameter values within the lifetime
+     * of the instance.
+     *
+     * @param samplePosition Inclusive starting position of the unity range.
+     * @param sampleRate Sample rate in Hertz related to {@code samplePosition}.
+     */
+    long isUnityUntil(@IntRange(from = 0) long samplePosition, @IntRange(from = 1) int sampleRate);
+  }
+
+  private final GainProvider gainProvider;
+  private long readFrames;
+
+  public GainProcessor(GainProvider gainProvider) {
+    this.gainProvider = checkNotNull(gainProvider);
+  }
+
+  @CanIgnoreReturnValue
+  @Override
+  public AudioFormat onConfigure(AudioFormat inputAudioFormat)
+      throws UnhandledAudioFormatException {
+    int encoding = inputAudioFormat.encoding;
+    if (encoding != C.ENCODING_PCM_16BIT && encoding != C.ENCODING_PCM_FLOAT) {
+      throw new UnhandledAudioFormatException(
+          "Invalid PCM encoding. Expected 16 bit PCM or float PCM.", inputAudioFormat);
+    }
+    return inputAudioFormat;
+  }
+
+  @Override
+  public boolean isActive() {
+    return super.isActive()
+        && !Objects.equals(inputAudioFormat, AudioFormat.NOT_SET)
+        && gainProvider.isUnityUntil(/* samplePosition= */ 0, inputAudioFormat.sampleRate)
+            != C.TIME_END_OF_SOURCE;
+  }
+
+  @Override
+  public void queueInput(ByteBuffer inputBuffer) {
+    checkState(
+        !Objects.equals(inputAudioFormat, AudioFormat.NOT_SET),
+        "Audio processor must be configured and flushed before calling queueInput().");
+
+    if (!inputBuffer.hasRemaining()) {
+      return;
+    }
+
+    checkArgument(
+        inputBuffer.remaining() % inputAudioFormat.bytesPerFrame == 0,
+        "Queued an incomplete frame.");
+
+    ByteBuffer buffer = replaceOutputBuffer(inputBuffer.remaining());
+
+    // Each iteration handles one frame.
+    while (inputBuffer.hasRemaining()) {
+      float gain =
+          gainProvider.getGainFactorAtSamplePosition(readFrames, inputAudioFormat.sampleRate);
+      if (gain == 1f) {
+        int oldLimit = inputBuffer.limit();
+
+        long regionEnd = gainProvider.isUnityUntil(readFrames, inputAudioFormat.sampleRate);
+        checkState(regionEnd != C.TIME_UNSET, "Expected a valid end boundary for unity region.");
+
+        // Only set limit if unity does not last until EoS.
+        if (regionEnd != C.TIME_END_OF_SOURCE) {
+          long limitOffsetBytes = (regionEnd - readFrames) * inputAudioFormat.bytesPerFrame;
+          inputBuffer.limit(min(oldLimit, (int) limitOffsetBytes + inputBuffer.position()));
+        }
+
+        readFrames += inputBuffer.remaining() / inputAudioFormat.bytesPerFrame;
+        buffer.put(inputBuffer);
+        inputBuffer.limit(oldLimit);
+      } else {
+        for (int i = 0; i < inputAudioFormat.channelCount; i++) {
+          switch (inputAudioFormat.encoding) {
+            case C.ENCODING_PCM_16BIT:
+              buffer.putShort((short) (inputBuffer.getShort() * gain));
+              break;
+            case C.ENCODING_PCM_FLOAT:
+              buffer.putFloat(inputBuffer.getFloat() * gain);
+              break;
+            default:
+              throw new IllegalStateException(
+                  "Unexpected PCM encoding: " + inputAudioFormat.encoding);
+          }
+        }
+        readFrames++;
+      }
+    }
+    buffer.flip();
+  }
+
+  @Override
+  public void onFlush() {
+    readFrames = 0;
+  }
+
+  @Override
+  public void onReset() {
+    readFrames = 0;
+  }
+}
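A sketch of wiring the two new classes together; `gainProvider` is the DefaultGainProvider built above, `pcmBuffer` is an assumed ByteBuffer of whole 16-bit PCM frames:

GainProcessor processor = new GainProcessor(gainProvider);
AudioProcessor.AudioFormat format =
    new AudioProcessor.AudioFormat(
        /* sampleRate= */ 44100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT);
processor.configure(format); // Throws UnhandledAudioFormatException for non-PCM16/float input.
processor.flush();           // Must be configured and flushed before queueInput().
processor.queueInput(pcmBuffer);
ByteBuffer output = processor.getOutput();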
@ -21,25 +21,24 @@ import static androidx.media3.common.util.Assertions.checkState;
|
||||
import static androidx.media3.common.util.SpeedProviderUtil.getNextSpeedChangeSamplePosition;
|
||||
import static androidx.media3.common.util.SpeedProviderUtil.getSampleAlignedSpeed;
|
||||
import static androidx.media3.common.util.Util.sampleCountToDurationUs;
|
||||
import static androidx.media3.common.util.Util.scaleLargeValue;
|
||||
import static java.lang.Math.min;
|
||||
import static java.lang.Math.round;
|
||||
|
||||
import androidx.annotation.GuardedBy;
|
||||
import androidx.annotation.IntRange;
|
||||
import androidx.annotation.VisibleForTesting;
|
||||
import androidx.media3.common.C;
|
||||
import androidx.media3.common.util.LongArray;
|
||||
import androidx.media3.common.Format;
|
||||
import androidx.media3.common.util.LongArrayQueue;
|
||||
import androidx.media3.common.util.SpeedProviderUtil;
|
||||
import androidx.media3.common.util.TimestampConsumer;
|
||||
import androidx.media3.common.util.UnstableApi;
|
||||
import androidx.media3.common.util.Util;
|
||||
import java.math.RoundingMode;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayDeque;
|
||||
import java.util.Queue;
|
||||
import java.util.function.LongConsumer;
|
||||
import org.checkerframework.checker.initialization.qual.UnknownInitialization;
|
||||
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
|
||||
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
|
||||
|
||||
/**
|
||||
* An {@link AudioProcessor} that changes the speed of audio samples depending on their timestamp.
|
||||
@ -67,34 +66,12 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
|
||||
@GuardedBy("lock")
|
||||
private final Queue<TimestampConsumer> pendingCallbacks;
|
||||
|
||||
// Elements in the same positions in the arrays are associated.
|
||||
|
||||
@GuardedBy("lock")
|
||||
private LongArray inputSegmentStartTimesUs;
|
||||
|
||||
@GuardedBy("lock")
|
||||
private LongArray outputSegmentStartTimesUs;
|
||||
|
||||
@GuardedBy("lock")
|
||||
private long lastProcessedInputTimeUs;
|
||||
|
||||
@GuardedBy("lock")
|
||||
private long lastSpeedAdjustedInputTimeUs;
|
||||
|
||||
@GuardedBy("lock")
|
||||
private long lastSpeedAdjustedOutputTimeUs;
|
||||
|
||||
@GuardedBy("lock")
|
||||
private long speedAdjustedTimeAsyncInputTimeUs;
|
||||
|
||||
@GuardedBy("lock")
|
||||
private float currentSpeed;
|
||||
|
||||
private long framesRead;
|
||||
|
||||
private boolean endOfStreamQueuedToSonic;
|
||||
|
||||
/** The current input audio format. */
|
||||
@GuardedBy("lock")
|
||||
private AudioFormat inputAudioFormat;
|
||||
|
||||
private AudioFormat pendingInputAudioFormat;
|
||||
@ -112,7 +89,6 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
|
||||
new SynchronizedSonicAudioProcessor(lock, /* keepActiveWithDefaultParameters= */ true);
|
||||
pendingCallbackInputTimesUs = new LongArrayQueue();
|
||||
pendingCallbacks = new ArrayDeque<>();
|
||||
speedAdjustedTimeAsyncInputTimeUs = C.TIME_UNSET;
|
||||
resetInternalState(/* shouldResetSpeed= */ true);
|
||||
}
|
||||
|
||||
@ -120,10 +96,10 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
|
||||
public static long getSampleCountAfterProcessorApplied(
|
||||
SpeedProvider speedProvider,
|
||||
@IntRange(from = 1) int inputSampleRateHz,
|
||||
@IntRange(from = 1) long inputSamples) {
|
||||
@IntRange(from = 0) long inputSamples) {
|
||||
checkArgument(speedProvider != null);
|
||||
checkArgument(inputSampleRateHz > 0);
|
||||
checkArgument(inputSamples > 0);
|
||||
checkArgument(inputSamples >= 0);
|
||||
|
||||
long outputSamples = 0;
|
||||
long positionSamples = 0;
|
||||
@ -171,18 +147,22 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
|
||||
|
||||
@Override
|
||||
public void queueInput(ByteBuffer inputBuffer) {
|
||||
long currentTimeUs = sampleCountToDurationUs(framesRead, inputAudioFormat.sampleRate);
|
||||
float newSpeed = getSampleAlignedSpeed(speedProvider, framesRead, inputAudioFormat.sampleRate);
|
||||
long nextSpeedChangeSamplePosition =
|
||||
getNextSpeedChangeSamplePosition(speedProvider, framesRead, inputAudioFormat.sampleRate);
|
||||
AudioFormat format;
|
||||
synchronized (lock) {
|
||||
format = inputAudioFormat;
|
||||
}
|
||||
|
||||
updateSpeed(newSpeed, currentTimeUs);
|
||||
float newSpeed = getSampleAlignedSpeed(speedProvider, framesRead, format.sampleRate);
|
||||
long nextSpeedChangeSamplePosition =
|
||||
getNextSpeedChangeSamplePosition(speedProvider, framesRead, format.sampleRate);
|
||||
|
||||
updateSpeed(newSpeed);
|
||||
|
||||
int inputBufferLimit = inputBuffer.limit();
|
||||
int bytesToNextSpeedChange;
|
||||
if (nextSpeedChangeSamplePosition != C.INDEX_UNSET) {
|
||||
bytesToNextSpeedChange =
|
||||
(int) ((nextSpeedChangeSamplePosition - framesRead) * inputAudioFormat.bytesPerFrame);
|
||||
(int) ((nextSpeedChangeSamplePosition - framesRead) * format.bytesPerFrame);
|
||||
// Update the input buffer limit to make sure that all samples processed have the same speed.
|
||||
inputBuffer.limit(min(inputBufferLimit, inputBuffer.position() + bytesToNextSpeedChange));
|
||||
} else {
|
||||
@ -197,10 +177,8 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
|
||||
endOfStreamQueuedToSonic = true;
|
||||
}
|
||||
long bytesRead = inputBuffer.position() - startPosition;
|
||||
checkState(
|
||||
bytesRead % inputAudioFormat.bytesPerFrame == 0, "A frame was not queued completely.");
|
||||
framesRead += bytesRead / inputAudioFormat.bytesPerFrame;
|
||||
updateLastProcessedInputTime();
|
||||
checkState(bytesRead % format.bytesPerFrame == 0, "A frame was not queued completely.");
|
||||
framesRead += bytesRead / format.bytesPerFrame;
|
||||
inputBuffer.limit(inputBufferLimit);
|
||||
}
|
||||
|
||||
@ -215,9 +193,7 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
|
||||
|
||||
@Override
|
||||
public ByteBuffer getOutput() {
|
||||
ByteBuffer output = sonicAudioProcessor.getOutput();
|
||||
processPendingCallbacks();
|
||||
return output;
|
||||
return sonicAudioProcessor.getOutput();
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -228,9 +204,12 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
|
||||
@Override
|
||||
public void flush() {
|
||||
inputEnded = false;
|
||||
inputAudioFormat = pendingInputAudioFormat;
|
||||
resetInternalState(/* shouldResetSpeed= */ false);
|
||||
sonicAudioProcessor.flush();
|
||||
synchronized (lock) {
|
||||
inputAudioFormat = pendingInputAudioFormat;
|
||||
sonicAudioProcessor.flush();
|
||||
processPendingCallbacks();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -238,7 +217,11 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
|
||||
flush();
|
||||
pendingInputAudioFormat = AudioFormat.NOT_SET;
|
||||
pendingOutputAudioFormat = AudioFormat.NOT_SET;
|
||||
inputAudioFormat = AudioFormat.NOT_SET;
|
||||
synchronized (lock) {
|
||||
inputAudioFormat = AudioFormat.NOT_SET;
|
||||
pendingCallbackInputTimesUs.clear();
|
||||
pendingCallbacks.clear();
|
||||
}
|
||||
resetInternalState(/* shouldResetSpeed= */ true);
|
||||
sonicAudioProcessor.reset();
|
||||
}
|
||||
@ -261,154 +244,125 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
|
||||
* @param callback The callback called with the output time. May be called on a different thread
|
||||
* from the caller of this method.
|
||||
*/
|
||||
// TODO(b/381553948): Accept an executor on which to dispatch the callback.
|
||||
public void getSpeedAdjustedTimeAsync(long inputTimeUs, TimestampConsumer callback) {
|
||||
int sampleRate;
|
||||
synchronized (lock) {
|
||||
checkArgument(speedAdjustedTimeAsyncInputTimeUs < inputTimeUs);
|
||||
speedAdjustedTimeAsyncInputTimeUs = inputTimeUs;
|
||||
if ((inputTimeUs <= lastProcessedInputTimeUs && pendingCallbackInputTimesUs.isEmpty())
|
||||
|| isEnded()) {
|
||||
callback.onTimestamp(calculateSpeedAdjustedTime(inputTimeUs));
|
||||
sampleRate = inputAudioFormat.sampleRate;
|
||||
|
||||
if (sampleRate == Format.NO_VALUE) {
|
||||
pendingCallbackInputTimesUs.add(inputTimeUs);
|
||||
pendingCallbacks.add(callback);
|
||||
return;
|
||||
}
|
||||
pendingCallbackInputTimesUs.add(inputTimeUs);
|
||||
pendingCallbacks.add(callback);
|
||||
}
|
||||
// TODO(b/381553948): Use an executor to invoke callback.
|
||||
callback.onTimestamp(
|
||||
getDurationUsAfterProcessorApplied(speedProvider, sampleRate, inputTimeUs));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the input media duration for the given playout duration.
|
||||
* Returns the input media duration in microseconds for the given playout duration.
|
||||
*
|
||||
* <p>Both durations are counted from the last {@link #reset()} or {@link #flush()} of the audio
|
||||
* processor.
|
||||
*
|
||||
* <p>The {@code playoutDurationUs} must be less than last processed buffer output time.
|
||||
* <p>This method returns the inverse of {@link #getSpeedAdjustedTimeAsync} when the instance has
|
||||
* been configured and flushed. Otherwise, it returns {@code playoutDurationUs}.
|
||||
*
|
||||
* @param playoutDurationUs The playout duration in microseconds.
|
||||
* @return The corresponding input duration in microseconds.
|
||||
*/
|
||||
public long getMediaDurationUs(long playoutDurationUs) {
|
||||
int sampleRate;
|
||||
synchronized (lock) {
|
||||
int floorIndex = outputSegmentStartTimesUs.size() - 1;
|
||||
while (floorIndex > 0 && outputSegmentStartTimesUs.get(floorIndex) > playoutDurationUs) {
|
||||
floorIndex--;
|
||||
}
|
||||
long lastSegmentOutputDurationUs =
|
||||
playoutDurationUs - outputSegmentStartTimesUs.get(floorIndex);
|
||||
long lastSegmentInputDurationUs;
|
||||
if (floorIndex == outputSegmentStartTimesUs.size() - 1) {
|
||||
lastSegmentInputDurationUs = getMediaDurationUsAtCurrentSpeed(lastSegmentOutputDurationUs);
|
||||
|
||||
} else {
|
||||
lastSegmentInputDurationUs =
|
||||
round(
|
||||
lastSegmentOutputDurationUs
|
||||
* divide(
|
||||
inputSegmentStartTimesUs.get(floorIndex + 1)
|
||||
- inputSegmentStartTimesUs.get(floorIndex),
|
||||
outputSegmentStartTimesUs.get(floorIndex + 1)
|
||||
- outputSegmentStartTimesUs.get(floorIndex)));
|
||||
}
|
||||
return inputSegmentStartTimesUs.get(floorIndex) + lastSegmentInputDurationUs;
|
||||
sampleRate = inputAudioFormat.sampleRate;
|
||||
}
|
||||
if (sampleRate == Format.NO_VALUE) {
|
||||
return playoutDurationUs;
|
||||
}
|
||||
long outputSamples =
|
||||
scaleLargeValue(playoutDurationUs, sampleRate, C.MICROS_PER_SECOND, RoundingMode.HALF_EVEN);
|
||||
long inputSamples = getInputFrameCountForOutput(speedProvider, sampleRate, outputSamples);
|
||||
return sampleCountToDurationUs(inputSamples, sampleRate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Assuming enough audio has been processed, calculates the time at which the {@code inputTimeUs}
|
||||
* is outputted at after the speed changes has been applied.
|
||||
* Returns the number of input frames needed to output a specific number of frames, given a speed
|
||||
* provider, input sample rate, and number of output frames.
|
||||
*
|
||||
* <p>This is the inverse operation of {@link #getSampleCountAfterProcessorApplied}.
|
||||
*/
|
||||
@SuppressWarnings("GuardedBy") // All call sites are guarded.
|
||||
private long calculateSpeedAdjustedTime(long inputTimeUs) {
|
||||
int floorIndex = inputSegmentStartTimesUs.size() - 1;
|
||||
while (floorIndex > 0 && inputSegmentStartTimesUs.get(floorIndex) > inputTimeUs) {
|
||||
floorIndex--;
|
||||
}
|
||||
long lastSegmentOutputDurationUs;
|
||||
if (floorIndex == inputSegmentStartTimesUs.size() - 1) {
|
||||
if (lastSpeedAdjustedInputTimeUs < inputSegmentStartTimesUs.get(floorIndex)) {
|
||||
lastSpeedAdjustedInputTimeUs = inputSegmentStartTimesUs.get(floorIndex);
|
||||
lastSpeedAdjustedOutputTimeUs = outputSegmentStartTimesUs.get(floorIndex);
|
||||
@VisibleForTesting
|
||||
/* package */ static long getInputFrameCountForOutput(
|
||||
SpeedProvider speedProvider,
|
||||
@IntRange(from = 1) int inputSampleRate,
|
||||
@IntRange(from = 0) long outputFrameCount) {
|
||||
checkArgument(inputSampleRate > 0);
|
||||
checkArgument(outputFrameCount >= 0);
|
||||
|
||||
long inputSampleCount = 0;
|
||||
while (outputFrameCount > 0) {
|
||||
long boundarySamples =
|
||||
getNextSpeedChangeSamplePosition(speedProvider, inputSampleCount, inputSampleRate);
|
||||
float speed = getSampleAlignedSpeed(speedProvider, inputSampleCount, inputSampleRate);
|
||||
|
||||
long outputSamplesForSection =
|
||||
Sonic.getExpectedFrameCountAfterProcessorApplied(
|
||||
/* inputSampleRateHz= */ inputSampleRate,
|
||||
/* outputSampleRateHz= */ inputSampleRate,
|
||||
/* speed= */ speed,
|
||||
/* pitch= */ speed,
|
||||
/* inputFrameCount= */ boundarySamples - inputSampleCount);
|
||||
|
||||
if (boundarySamples == C.INDEX_UNSET || outputSamplesForSection > outputFrameCount) {
|
||||
inputSampleCount +=
|
||||
Sonic.getExpectedInputFrameCountForOutputFrameCount(
|
||||
/* inputSampleRateHz= */ inputSampleRate,
|
||||
/* outputSampleRateHz= */ inputSampleRate,
|
||||
/* speed= */ speed,
|
||||
/* pitch= */ speed,
|
||||
outputFrameCount);
|
||||
outputFrameCount = 0;
|
||||
} else {
|
||||
outputFrameCount -= outputSamplesForSection;
|
||||
inputSampleCount = boundarySamples;
|
||||
}
|
||||
long lastSegmentInputDurationUs = inputTimeUs - lastSpeedAdjustedInputTimeUs;
|
||||
lastSegmentOutputDurationUs = getPlayoutDurationUsAtCurrentSpeed(lastSegmentInputDurationUs);
|
||||
} else {
|
||||
long lastSegmentInputDurationUs = inputTimeUs - lastSpeedAdjustedInputTimeUs;
|
||||
lastSegmentOutputDurationUs =
|
||||
round(
|
||||
lastSegmentInputDurationUs
|
||||
* divide(
|
||||
outputSegmentStartTimesUs.get(floorIndex + 1)
|
||||
- outputSegmentStartTimesUs.get(floorIndex),
|
||||
inputSegmentStartTimesUs.get(floorIndex + 1)
|
||||
- inputSegmentStartTimesUs.get(floorIndex)));
|
||||
}
|
||||
lastSpeedAdjustedInputTimeUs = inputTimeUs;
|
||||
lastSpeedAdjustedOutputTimeUs += lastSegmentOutputDurationUs;
|
||||
return lastSpeedAdjustedOutputTimeUs;
|
||||
|
||||
return inputSampleCount;
|
||||
}
|
||||
|
||||
private static double divide(long dividend, long divisor) {
|
||||
return ((double) dividend) / divisor;
|
||||
private static long getDurationUsAfterProcessorApplied(
|
||||
SpeedProvider speedProvider, int sampleRate, long inputDurationUs) {
|
||||
long inputSamples =
|
||||
scaleLargeValue(inputDurationUs, sampleRate, C.MICROS_PER_SECOND, RoundingMode.HALF_EVEN);
|
||||
long outputSamples =
|
||||
getSampleCountAfterProcessorApplied(speedProvider, sampleRate, inputSamples);
|
||||
return sampleCountToDurationUs(outputSamples, sampleRate);
|
||||
}
|
||||
|
||||
private void processPendingCallbacks() {
|
||||
synchronized (lock) {
|
||||
while (!pendingCallbacks.isEmpty()
|
||||
&& (pendingCallbackInputTimesUs.element() <= lastProcessedInputTimeUs || isEnded())) {
|
||||
pendingCallbacks
|
||||
.remove()
|
||||
.onTimestamp(calculateSpeedAdjustedTime(pendingCallbackInputTimesUs.remove()));
|
||||
if (inputAudioFormat.sampleRate == Format.NO_VALUE) {
|
||||
return;
|
||||
}
|
||||
|
||||
while (!pendingCallbacks.isEmpty()) {
|
||||
long inputTimeUs = pendingCallbackInputTimesUs.remove();
|
||||
TimestampConsumer consumer = pendingCallbacks.remove();
|
||||
// TODO(b/381553948): Use an executor to invoke callback.
|
||||
consumer.onTimestamp(
|
||||
getDurationUsAfterProcessorApplied(
|
||||
speedProvider, inputAudioFormat.sampleRate, inputTimeUs));
|
||||
}
|
||||
}
|
||||
}
-  private void updateSpeed(float newSpeed, long timeUs) {
-    synchronized (lock) {
-      if (newSpeed != currentSpeed) {
-        updateSpeedChangeArrays(timeUs);
-        currentSpeed = newSpeed;
-        sonicAudioProcessor.setSpeed(newSpeed);
-        sonicAudioProcessor.setPitch(newSpeed);
-        // Invalidate any previously created buffers in SonicAudioProcessor and the base class.
-        sonicAudioProcessor.flush();
-        endOfStreamQueuedToSonic = false;
-      }
-    }
-  }

-  @SuppressWarnings("GuardedBy") // All call sites are guarded.
-  private void updateSpeedChangeArrays(long currentSpeedChangeInputTimeUs) {
-    long lastSpeedChangeOutputTimeUs =
-        outputSegmentStartTimesUs.get(outputSegmentStartTimesUs.size() - 1);
-    long lastSpeedChangeInputTimeUs =
-        inputSegmentStartTimesUs.get(inputSegmentStartTimesUs.size() - 1);
-    long lastSpeedSegmentMediaDurationUs =
-        currentSpeedChangeInputTimeUs - lastSpeedChangeInputTimeUs;
-    inputSegmentStartTimesUs.add(currentSpeedChangeInputTimeUs);
-    outputSegmentStartTimesUs.add(
-        lastSpeedChangeOutputTimeUs
-            + getPlayoutDurationUsAtCurrentSpeed(lastSpeedSegmentMediaDurationUs));
-  }

-  private long getPlayoutDurationUsAtCurrentSpeed(long mediaDurationUs) {
-    return sonicAudioProcessor.getPlayoutDuration(mediaDurationUs);
-  }

-  private long getMediaDurationUsAtCurrentSpeed(long playoutDurationUs) {
-    return sonicAudioProcessor.getMediaDuration(playoutDurationUs);
-  }

-  private void updateLastProcessedInputTime() {
-    synchronized (lock) {
-      // TODO - b/320242819: Investigate whether bytesRead can be used here rather than
-      // sonicAudioProcessor.getProcessedInputBytes().
-      long currentProcessedInputDurationUs =
-          Util.scaleLargeTimestamp(
-              /* timestamp= */ sonicAudioProcessor.getProcessedInputBytes(),
-              /* multiplier= */ C.MICROS_PER_SECOND,
-              /* divisor= */ (long) inputAudioFormat.sampleRate * inputAudioFormat.bytesPerFrame);
-      lastProcessedInputTimeUs =
-          inputSegmentStartTimesUs.get(inputSegmentStartTimesUs.size() - 1)
-              + currentProcessedInputDurationUs;
-    }
-  }

  private void updateSpeed(float newSpeed) {
    if (newSpeed != currentSpeed) {
      currentSpeed = newSpeed;
      sonicAudioProcessor.setSpeed(newSpeed);
      sonicAudioProcessor.setPitch(newSpeed);
      // Invalidate any previously created buffers in SonicAudioProcessor and the base class.
      sonicAudioProcessor.flush();
      endOfStreamQueuedToSonic = false;
    }
  }

@@ -420,28 +374,12 @@ public final class SpeedChangingAudioProcessor implements AudioProcessor {
   *
   * @param shouldResetSpeed Whether {@link #currentSpeed} should be reset to its default value.
   */
-  @EnsuresNonNull({"inputSegmentStartTimesUs", "outputSegmentStartTimesUs"})
-  @RequiresNonNull("lock")
  private void resetInternalState(
      @UnknownInitialization SpeedChangingAudioProcessor this, boolean shouldResetSpeed) {
-    synchronized (lock) {
-      inputSegmentStartTimesUs = new LongArray();
-      outputSegmentStartTimesUs = new LongArray();
-      inputSegmentStartTimesUs.add(0);
-      outputSegmentStartTimesUs.add(0);
-      lastProcessedInputTimeUs = 0;
-      lastSpeedAdjustedInputTimeUs = 0;
-      lastSpeedAdjustedOutputTimeUs = 0;
-      if (shouldResetSpeed) {
-        currentSpeed = 1f;
-      }
    if (shouldResetSpeed) {
      currentSpeed = 1f;
    }

    framesRead = 0;
    endOfStreamQueuedToSonic = false;
    // TODO: b/339842724 - This should ideally also reset speedAdjustedTimeAsyncInputTimeUs and
    // clear pendingCallbacks and pendingCallbacksInputTimes. We can't do this at the moment
    // because some clients register callbacks with getSpeedAdjustedTimeAsync before this audio
    // processor is flushed.
-    }
  }
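The TODO above refers to the asynchronous timestamp API on this processor. As a hedged sketch of a typical call site, assuming the getSpeedAdjustedTimeAsync(long, TimestampConsumer) overload referenced in that comment:

// Hypothetical call site: ask the processor to map an input timestamp to its
// speed-adjusted equivalent once enough audio has been processed.
// handleAdjustedTime is a placeholder for application code.
speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
    /* inputTimeUs= */ 1_500_000L,
    /* callback= */ outputTimeUs -> handleAdjustedTime(outputTimeUs));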
@@ -40,7 +40,6 @@ import androidx.annotation.Nullable;
import androidx.media3.common.util.Assertions;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
-import com.google.common.base.Objects;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.ByteArrayOutputStream;
import java.lang.annotation.Documented;
@@ -48,6 +47,7 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.ArrayList;
import java.util.Objects;
import org.checkerframework.dataflow.qual.Pure;

/** Contains information about a specific cue, including textual content and formatting data. */
@@ -396,7 +396,7 @@ public final class Cue {

  @Override
  public int hashCode() {
-    return Objects.hashCode(
    return Objects.hash(
        text,
        textAlignment,
        multiRowAlignment,
@@ -31,6 +31,8 @@ public final class BackgroundExecutor {
   *
   * <p>Must only be used for quick, high-priority tasks to ensure other background tasks are not
   * blocked.
   *
   * <p>The thread is guaranteed to be alive for the lifetime of the application.
   */
  public static synchronized Executor get() {
    if (staticInstance == null) {
@@ -42,6 +44,9 @@
  /**
   * Sets the {@link Executor} to be returned from {@link #get()}.
   *
   * <p>Note that the thread of the provided {@link Executor} must stay alive for the lifetime of
   * the application.
   *
   * @param executor An {@link Executor} that runs tasks on background threads and should only be
   *     used for quick, high-priority tasks to ensure other background tasks are not blocked.
   */
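A minimal usage sketch of the contract documented above, assuming the setter described in this hunk is BackgroundExecutor.set(Executor); the single-thread executor is an illustrative choice, not what the library installs by default:

// Install a replacement executor once at startup (java.util.concurrent.Executors);
// its thread must outlive the application, per the javadoc above.
BackgroundExecutor.set(Executors.newSingleThreadExecutor());
// Later, post quick, high-priority work. preloadConfiguration is a placeholder.
BackgroundExecutor.get().execute(() -> preloadConfiguration());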
@@ -43,6 +43,7 @@ import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import javax.microedition.khronos.egl.EGL10;

/** OpenGL ES utilities. */
@@ -209,7 +210,7 @@ public final class GlUtil {
   */
  public static boolean isYuvTargetExtensionSupported() {
    @Nullable String glExtensions;
-    if (Util.areEqual(EGL14.eglGetCurrentContext(), EGL14.EGL_NO_CONTEXT)) {
    if (Objects.equals(EGL14.eglGetCurrentContext(), EGL14.EGL_NO_CONTEXT)) {
      // Create a placeholder context and make it current to allow calling GLES20.glGetString().
      try {
        EGLDisplay eglDisplay = getDefaultEglDisplay();
@@ -227,6 +228,16 @@
    return glExtensions != null && glExtensions.contains(EXTENSION_YUV_TARGET);
  }

  /** Returns whether the given {@link C.ColorTransfer} is supported. */
  public static boolean isColorTransferSupported(@C.ColorTransfer int colorTransfer) {
    if (colorTransfer == C.COLOR_TRANSFER_ST2084) {
      return GlUtil.isBt2020PqExtensionSupported();
    } else if (colorTransfer == C.COLOR_TRANSFER_HLG) {
      return GlUtil.isBt2020HlgExtensionSupported();
    }
    return true;
  }
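The new helper above lets callers validate HDR support before building a GL pipeline. An illustrative guard, where colorTransfer is a hypothetical local holding a C.ColorTransfer value taken from the input Format:

// Hedged sketch: reject unsupported HDR transfers early instead of failing mid-render.
if (!GlUtil.isColorTransferSupported(colorTransfer)) {
  throw new IllegalArgumentException("Unsupported color transfer: " + colorTransfer);
}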
  /** Returns whether {@link #EXTENSION_COLORSPACE_BT2020_PQ} is supported. */
  public static boolean isBt2020PqExtensionSupported() {
    // On API<33, the system cannot display PQ content correctly regardless of whether BT2020 PQ
@@ -128,6 +128,10 @@ public final class MediaFormatUtil {

    formatBuilder.setInitializationData(csdBuffers.build());

    if (mediaFormat.containsKey(MediaFormat.KEY_TRACK_ID)) {
      formatBuilder.setId(mediaFormat.getInteger(MediaFormat.KEY_TRACK_ID));
    }

    return formatBuilder.build();
  }

@@ -175,6 +179,10 @@
    result.setInteger(MediaFormat.KEY_ENCODER_PADDING, format.encoderPadding);

    maybeSetPixelAspectRatio(result, format.pixelWidthHeightRatio);

    if (format.id != null) {
      result.setInteger(MediaFormat.KEY_TRACK_ID, Integer.parseInt(format.id));
    }
    return result;
  }
@@ -15,13 +15,18 @@
 */
package androidx.media3.common.util;

import static java.nio.ByteOrder.BIG_ENDIAN;
import static java.nio.ByteOrder.LITTLE_ENDIAN;

import androidx.annotation.Nullable;
import com.google.common.collect.ImmutableSet;
import com.google.common.primitives.Chars;
import com.google.common.primitives.Ints;
import com.google.common.primitives.UnsignedBytes;
import com.google.common.primitives.UnsignedInts;
import com.google.errorprone.annotations.CheckReturnValue;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
@@ -34,6 +39,9 @@ import java.util.Arrays;
@CheckReturnValue
public final class ParsableByteArray {

  /** A value that is outside the valid range of unicode code points. */
  public static final int INVALID_CODE_POINT = 0x11_0000;

  private static final char[] CR_AND_LF = {'\r', '\n'};
  private static final char[] LF = {'\n'};
  private static final ImmutableSet<Charset> SUPPORTED_CHARSETS_FOR_READLINE =
@@ -134,7 +142,7 @@

  /** Returns the number of bytes yet to be read. */
  public int bytesLeft() {
-    return limit - position;
    return Math.max(limit - position, 0);
  }

  /** Returns the limit. */
@@ -240,26 +248,74 @@
    return (data[position] & 0xFF);
  }

-  /**
-   * Peeks at the next char.
-   *
-   * <p>Equivalent to passing {@link StandardCharsets#UTF_16} or {@link StandardCharsets#UTF_16BE}
-   * to {@link #peekChar(Charset)}.
-   */
  /** Peeks at the next two bytes and interprets them as a big-endian char. */
  public char peekChar() {
-    return (char) ((data[position] & 0xFF) << 8 | (data[position + 1] & 0xFF));
    return peekChar(BIG_ENDIAN, /* offset= */ 0);
  }

  /**
   * Peeks at the next char (as decoded by {@code charset}).
   *
   * @throws IllegalArgumentException if charset is not supported. Only US_ASCII, UTF-8, UTF-16,
   *     UTF-16BE, and UTF-16LE are supported.
   * @deprecated Either use {@link #peekChar()} to peek the next two bytes (big-endian) or {@link
   *     #peekCodePoint(Charset)} to peek in a {@link Charset}-aware way.
   */
  @Deprecated
  public char peekChar(Charset charset) {
    Assertions.checkArgument(
        SUPPORTED_CHARSETS_FOR_READLINE.contains(charset), "Unsupported charset: " + charset);
-    return (char) (peekCharacterAndSize(charset) >> Short.SIZE);
    if (bytesLeft() == 0) {
      return 0;
    }
    if (charset.equals(StandardCharsets.US_ASCII)) {
      return (char) peekUnsignedByte();
    } else if (charset.equals(StandardCharsets.UTF_8)) {
      return (data[position] & 0x80) == 0 ? (char) peekUnsignedByte() : 0;
    } else {
      // UTF-16
      if (bytesLeft() < 2) {
        return 0;
      }
      ByteOrder byteOrder = charset.equals(StandardCharsets.UTF_16LE) ? LITTLE_ENDIAN : BIG_ENDIAN;
      return peekChar(byteOrder, /* offset= */ 0);
    }
  }

  /** Peek the UTF-16 char at {@link #position}{@code + offset}. */
  private char peekChar(ByteOrder byteOrder, int offset) {
    return byteOrder == BIG_ENDIAN
        ? Chars.fromBytes(data[position + offset], data[position + offset + 1])
        : Chars.fromBytes(data[position + offset + 1], data[position + offset]);
  }

  /**
   * Peeks at the code point starting at {@link #getPosition()} as interpreted by {@code charset}.
   *
   * <p>The exact behaviour depends on {@code charset}:
   *
   * <ul>
   *   <li>US_ASCII: Returns the byte at {@link #getPosition()} if it's valid ASCII (less than
   *       {@code 0x80}), otherwise returns {@link #INVALID_CODE_POINT}.
   *   <li>UTF-8: If {@link #getPosition()} is the start of a UTF-8 code unit the whole unit is
   *       decoded and returned. Otherwise {@link #INVALID_CODE_POINT} is returned.
   *   <li>UTF-16 (all endian-nesses):
   *       <ul>
   *         <li>If {@link #getPosition()} is at the start of a {@linkplain
   *             Character#isHighSurrogate(char) high surrogate} code unit and the following two
   *             bytes are a {@linkplain Character#isLowSurrogate(char) low surrogate} code unit,
   *             the {@linkplain Character#toCodePoint(char, char) combined code point} is returned.
   *         <li>Otherwise the single code unit starting at {@link #getPosition()} is returned
   *             directly.
   *         <li>UTF-16 has no support for byte-level synchronization, so if {@link #getPosition()}
   *             is not aligned with the start of a UTF-16 code unit then the result is undefined.
   *       </ul>
   * </ul>
   *
   * @throws IllegalArgumentException if charset is not supported. Only US_ASCII, UTF-8, UTF-16,
   *     UTF-16BE, and UTF-16LE are supported.
   * @throws IndexOutOfBoundsException if {@link #bytesLeft()} doesn't allow reading the smallest
   *     code unit in {@code charset} (1 byte for ASCII and UTF-8, 2 bytes for UTF-16).
   */
  public int peekCodePoint(Charset charset) {
    int codePointAndSize = peekCodePointAndSize(charset);
    return codePointAndSize != 0 ? Ints.checkedCast(codePointAndSize >>> 8) : INVALID_CODE_POINT;
  }
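A short usage sketch of the new public method; the byte values here are hypothetical test data:

// A buffer holding "é" encoded as UTF-8 (0xC3 0xA9).
ParsableByteArray scratch = new ParsableByteArray(new byte[] {(byte) 0xC3, (byte) 0xA9});
int codePoint = scratch.peekCodePoint(StandardCharsets.UTF_8);
// codePoint == 0x00E9, and position is unchanged, so the bytes can still be read.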
  /** Reads the next byte as an unsigned value. */
@@ -705,48 +761,145 @@
   * without advancing {@link #position}. Returns {@code 0} if {@link #bytesLeft()} doesn't allow
   * reading a whole character in {@code charset}.
   *
-   * <p>Only supports characters in {@code chars} that occupy a single code unit (i.e. one byte for
-   * UTF-8 and two bytes for UTF-16).
   * <p>Only supports characters in {@code chars} that are in the Basic Multilingual Plane (occupy
   * a single char).
   */
  private char readCharacterIfInList(Charset charset, char[] chars) {
-    int characterAndSize = peekCharacterAndSize(charset);
    if (bytesLeft() < getSmallestCodeUnitSize(charset)) {
      return 0;
    }
    int codePointAndSize = peekCodePointAndSize(charset);
    if (codePointAndSize == 0) {
      return 0;
    }

-    if (characterAndSize != 0 && Chars.contains(chars, (char) (characterAndSize >> Short.SIZE))) {
-      position += characterAndSize & 0xFFFF;
-      return (char) (characterAndSize >> Short.SIZE);
    int codePoint = UnsignedInts.checkedCast(codePointAndSize >>> 8);
    if (Character.isSupplementaryCodePoint(codePoint)) {
      return 0;
    }
    char c = Chars.checkedCast(codePoint);
    if (Chars.contains(chars, c)) {
      position += Ints.checkedCast(codePointAndSize & 0xFF);
      return c;
    } else {
      return 0;
    }
  }

  /**
-   * Peeks at the character at {@link #position} (as decoded by {@code charset}), returns it and the
-   * number of bytes the character takes up within the array packed into an int. First four bytes
-   * are the character and the second four is the size in bytes it takes. Returns 0 if {@link
-   * #bytesLeft()} doesn't allow reading a whole character in {@code charset} or if the {@code
-   * charset} is not one of US_ASCII, UTF-8, UTF-16, UTF-16BE, or UTF-16LE.
   * Peeks at the code unit at {@link #position} (as decoded by {@code charset}), and the number of
   * bytes it occupies within {@link #data}.
   *
-   * <p>Only supports characters that occupy a single code unit (i.e. one byte for UTF-8 and two
-   * bytes for UTF-16).
   * <p>See {@link #peekCodePoint(Charset)} for detailed per-charset behaviour & edge cases.
   *
   * @return The code point in the upper 24 bits, and the size in bytes in the lower 8 bits. Or zero
   *     if no valid code unit starts at {@link #position} and fits within {@link #bytesLeft()}.
   * @throws IndexOutOfBoundsException if {@link #bytesLeft()} doesn't allow reading the smallest
   *     code unit in {@code charset} (1 byte for ASCII and UTF-8, 2 bytes for UTF-16).
   * @throws IllegalArgumentException if charset is not supported. Only US_ASCII, UTF-8, UTF-16,
   *     UTF-16BE, and UTF-16LE are supported.
   */
-  private int peekCharacterAndSize(Charset charset) {
-    byte character;
-    short characterSize;
-    if ((charset.equals(StandardCharsets.UTF_8) || charset.equals(StandardCharsets.US_ASCII))
-        && bytesLeft() >= 1) {
-      character = (byte) Chars.checkedCast(UnsignedBytes.toInt(data[position]));
-      characterSize = 1;
-    } else if ((charset.equals(StandardCharsets.UTF_16)
-            || charset.equals(StandardCharsets.UTF_16BE))
-        && bytesLeft() >= 2) {
-      character = (byte) Chars.fromBytes(data[position], data[position + 1]);
-      characterSize = 2;
-    } else if (charset.equals(StandardCharsets.UTF_16LE) && bytesLeft() >= 2) {
-      character = (byte) Chars.fromBytes(data[position + 1], data[position]);
-      characterSize = 2;
  private int peekCodePointAndSize(Charset charset) {
    Assertions.checkArgument(
        SUPPORTED_CHARSETS_FOR_READLINE.contains(charset), "Unsupported charset: " + charset);
    if (bytesLeft() < getSmallestCodeUnitSize(charset)) {
      throw new IndexOutOfBoundsException("position=" + position + ", limit=" + limit);
    }
    int codePoint;
    byte codePointSize;
    if (charset.equals(StandardCharsets.US_ASCII)) {
      if ((data[position] & 0x80) != 0) {
        return 0;
      }
      codePoint = UnsignedBytes.toInt(data[position]);
      codePointSize = 1;
    } else if (charset.equals(StandardCharsets.UTF_8)) {
      codePointSize = peekUtf8CodeUnitSize();
      switch (codePointSize) {
        case 1:
          codePoint = UnsignedBytes.toInt(data[position]);
          break;
        case 2:
          codePoint = decodeUtf8CodeUnit(0, 0, data[position], data[position + 1]);
          break;
        case 3:
          int firstByteWithoutStartCode = data[position] & 0xF;
          codePoint =
              decodeUtf8CodeUnit(
                  0, firstByteWithoutStartCode, data[position + 1], data[position + 2]);
          break;
        case 4:
          codePoint =
              decodeUtf8CodeUnit(
                  data[position], data[position + 1], data[position + 2], data[position + 3]);
          break;
        case 0:
        default:
          return 0;
      }
    } else {
      // UTF-16
      ByteOrder byteOrder = charset.equals(StandardCharsets.UTF_16LE) ? LITTLE_ENDIAN : BIG_ENDIAN;
      char c = peekChar(byteOrder, /* offset= */ 0);
      if (Character.isHighSurrogate(c) && bytesLeft() >= 4) {
        char lowSurrogate = peekChar(byteOrder, /* offset= */ 2);
        codePoint = Character.toCodePoint(c, lowSurrogate);
        codePointSize = 4;
      } else {
        // This is either a BMP code point, an unpaired surrogate, or position is in the middle of
        // a matching surrogate pair.
        codePoint = c;
        codePointSize = 2;
      }
    }
    return (codePoint << 8) | codePointSize;
  }

  private static int getSmallestCodeUnitSize(Charset charset) {
    Assertions.checkArgument(
        SUPPORTED_CHARSETS_FOR_READLINE.contains(charset), "Unsupported charset: " + charset);
    return charset.equals(StandardCharsets.UTF_8) || charset.equals(StandardCharsets.US_ASCII)
        ? 1
        : 2;
  }

  /**
   * Returns the size (in bytes) of the UTF-8 code unit starting at {@link #position}. Returns zero
   * if no full UTF-8 code unit seems to start at {@link #position}.
   */
  private byte peekUtf8CodeUnitSize() {
    if ((data[position] & 0x80) == 0) {
      return 1;
    } else if ((data[position] & 0xE0) == 0xC0
        && bytesLeft() >= 2
        && isUtf8ContinuationByte(data[position + 1])) {
      return 2;
    } else if ((data[position] & 0xF0) == 0xE0
        && bytesLeft() >= 3
        && isUtf8ContinuationByte(data[position + 1])
        && isUtf8ContinuationByte(data[position + 2])) {
      return 3;
    } else if ((data[position] & 0xF8) == 0xF0
        && bytesLeft() >= 4
        && isUtf8ContinuationByte(data[position + 1])
        && isUtf8ContinuationByte(data[position + 2])
        && isUtf8ContinuationByte(data[position + 3])) {
      return 4;
    } else {
      // We found a pattern that doesn't seem to be valid UTF-8.
      return 0;
    }
-    return (Chars.checkedCast(character) << Short.SIZE) + characterSize;
  }

  private static boolean isUtf8ContinuationByte(byte b) {
    return (b & 0xC0) == 0x80;
  }

  private static int decodeUtf8CodeUnit(int b1, int b2, int b3, int b4) {
    return Ints.fromBytes(
        (byte) 0,
        UnsignedBytes.checkedCast(((b1 & 0x7) << 2) | (b2 & 0b0011_0000) >> 4),
        UnsignedBytes.checkedCast(((byte) b2 & 0xF) << 4 | ((byte) b3 & 0b0011_1100) >> 2),
        UnsignedBytes.checkedCast(((byte) b3 & 0x3) << 6 | ((byte) b4 & 0x3F)));
  }
}
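The continuation-byte checks in peekUtf8CodeUnitSize follow directly from the UTF-8 bit layout: a lead byte announces the unit length, and each following byte contributes six payload bits. A worked example with hypothetical input bytes:

// 0xE2 0x82 0xAC is the UTF-8 encoding of U+20AC (the euro sign).
// The lead byte 0xE2 matches (b & 0xF0) == 0xE0, so the unit is 3 bytes long,
// and the code point is assembled from 4 + 6 + 6 payload bits:
int codePoint = ((0xE2 & 0xF) << 12) | ((0x82 & 0x3F) << 6) | (0xAC & 0x3F);
// codePoint == 0x20AC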
@@ -22,6 +22,7 @@ import android.text.TextUtils;
import androidx.annotation.Nullable;
import com.google.common.base.Ascii;
import java.util.List;
import java.util.Objects;

/** Utility methods for manipulating URIs. */
@UnstableApi
@@ -306,7 +307,7 @@ public final class UriUtil {
        baseUriScheme == null
            ? targetUriScheme == null
            : targetUriScheme != null && Ascii.equalsIgnoreCase(baseUriScheme, targetUriScheme);
-    if (!isSameScheme || !Util.areEqual(baseUri.getAuthority(), targetUri.getAuthority())) {
    if (!isSameScheme || !Objects.equals(baseUri.getAuthority(), targetUri.getAuthority())) {
      // Different schemes or authorities, cannot find relative path, return targetUri.
      return targetUri.toString();
    }
@@ -95,6 +95,7 @@ import androidx.media3.common.ParserException;
import androidx.media3.common.PlaybackException;
import androidx.media3.common.Player;
import androidx.media3.common.Player.Commands;
import androidx.media3.common.audio.AudioManagerCompat;
import androidx.media3.common.audio.AudioProcessor;
import com.google.common.base.Ascii;
import com.google.common.io.ByteStreams;
@@ -563,7 +564,7 @@ public final class Util {
  @UnstableApi
  public static boolean contains(@NullableType Object[] items, @Nullable Object item) {
    for (Object arrayItem : items) {
-      if (areEqual(arrayItem, item)) {
      if (Objects.equals(arrayItem, item)) {
        return true;
      }
    }
@@ -795,7 +796,7 @@
    if (!looper.getThread().isAlive()) {
      return false;
    }
-    if (handler.getLooper() == Looper.myLooper()) {
    if (looper == Looper.myLooper()) {
      runnable.run();
      return true;
    } else {
@@ -2485,9 +2486,7 @@
   */
  @UnstableApi
  public static int generateAudioSessionIdV21(Context context) {
-    @Nullable
-    AudioManager audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
-    return audioManager == null ? AudioManager.ERROR : audioManager.generateAudioSessionId();
    return AudioManagerCompat.getAudioManager(context).generateAudioSessionId();
  }

  /**
@@ -3062,7 +3061,7 @@
  @UnstableApi
  public static boolean inflate(
      ParsableByteArray input, ParsableByteArray output, @Nullable Inflater inflater) {
-    if (input.bytesLeft() <= 0) {
    if (input.bytesLeft() == 0) {
      return false;
    }
    if (output.capacity() < input.bytesLeft()) {
@@ -3478,6 +3477,20 @@
    }
  }

  /** Returns {@link C.BufferFlags} corresponding to {@link MediaCodec} flags. */
  @UnstableApi
  public static @C.BufferFlags int getBufferFlagsFromMediaCodecFlags(int mediaCodecFlags) {
    @C.BufferFlags int flags = 0;
    if ((mediaCodecFlags & MediaCodec.BUFFER_FLAG_KEY_FRAME) == MediaCodec.BUFFER_FLAG_KEY_FRAME) {
      flags |= C.BUFFER_FLAG_KEY_FRAME;
    }
    if ((mediaCodecFlags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
        == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
      flags |= C.BUFFER_FLAG_END_OF_STREAM;
    }
    return flags;
  }
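A typical call site for the new flag-mapping helper; in real code the BufferInfo would be populated by MediaCodec.dequeueOutputBuffer rather than constructed empty as in this sketch:

MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
@C.BufferFlags int flags = Util.getBufferFlagsFromMediaCodecFlags(bufferInfo.flags);
boolean isKeyFrame = (flags & C.BUFFER_FLAG_KEY_FRAME) != 0;
boolean isEndOfStream = (flags & C.BUFFER_FLAG_END_OF_STREAM) != 0;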
  @UnstableApi
  public static boolean isFrameDropAllowedOnSurfaceInput(Context context) {
    // Prior to API 29, decoders may drop frames to keep their output surface from growing out of
@@ -324,6 +324,61 @@ public class AdPlaybackStateTest {
        .inOrder();
  }

  @Test
  public void withRemovedAdGroupCountBefore() {
    AdPlaybackState adPlaybackState =
        new AdPlaybackState("adsId", 6_000_000L, 18_000_000L)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 1)
            .withAdId(1, 0, "ad1-0")
            .withAvailableAdMediaItem(
                /* adGroupIndex= */ 1,
                /* adIndexInAdGroup= */ 0,
                new MediaItem.Builder()
                    .setUri("http://example.com/media-1-0.m3u8")
                    .setMimeType(MimeTypes.APPLICATION_M3U8)
                    .build());

    assertThat(adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 6_000_000L))
        .isEqualTo(adPlaybackState);
    assertThat(adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 6_000_001L))
        .isEqualTo(adPlaybackState.withRemovedAdGroupCount(/* removedAdGroupCount= */ 1));
    assertThat(adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 18_000_000L))
        .isEqualTo(adPlaybackState.withRemovedAdGroupCount(/* removedAdGroupCount= */ 1));
    assertThat(adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 18_000_001L))
        .isEqualTo(adPlaybackState.withRemovedAdGroupCount(/* removedAdGroupCount= */ 2));
    assertThat(
            adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 18_000_001L).adGroupCount)
        .isEqualTo(2);
  }

  @Test
  public void withRemovedAdGroupCountBefore_withLivePlaceholder() {
    AdPlaybackState adPlaybackState =
        new AdPlaybackState("adsId", 6_000_000L, 18_000_000L)
            .withLivePostrollPlaceholderAppended(/* isServerSideInserted= */ false)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 1)
            .withAdId(1, 0, "ad1-0")
            .withAvailableAdMediaItem(
                /* adGroupIndex= */ 1,
                /* adIndexInAdGroup= */ 0,
                new MediaItem.Builder()
                    .setUri("http://example.com/media-1-0.m3u8")
                    .setMimeType(MimeTypes.APPLICATION_M3U8)
                    .build());

    assertThat(adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 6_000_000L))
        .isEqualTo(adPlaybackState);
    assertThat(adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 6_000_001L))
        .isEqualTo(adPlaybackState.withRemovedAdGroupCount(/* removedAdGroupCount= */ 1));
    assertThat(adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 18_000_000L))
        .isEqualTo(adPlaybackState.withRemovedAdGroupCount(/* removedAdGroupCount= */ 1));
    assertThat(adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 18_000_001L))
        .isEqualTo(adPlaybackState.withRemovedAdGroupCount(/* removedAdGroupCount= */ 2));
    assertThat(
            adPlaybackState.withRemovedAdGroupCountBefore(/* timeUs= */ 18_000_001L).adGroupCount)
        .isEqualTo(3);
  }

  @Test
  public void withAvailableAd_forClientSideAdGroup_throwsRuntimeException() {
    AdPlaybackState state =
@@ -450,6 +450,7 @@ public class MediaItemTest {
            .setRelativeToLiveWindow(true)
            .setRelativeToDefaultPosition(true)
            .setStartsAtKeyFrame(true)
            .setAllowUnseekableMedia(true)
            .build())
        .build();

@@ -458,6 +459,7 @@
    assertThat(mediaItem.clippingConfiguration.relativeToLiveWindow).isTrue();
    assertThat(mediaItem.clippingConfiguration.relativeToDefaultPosition).isTrue();
    assertThat(mediaItem.clippingConfiguration.startsAtKeyFrame).isTrue();
    assertThat(mediaItem.clippingConfiguration.allowUnseekableMedia).isTrue();
    assertThat(mediaItem.clippingConfiguration).isEqualTo(mediaItem.clippingProperties);
  }

@@ -475,6 +477,7 @@
    assertThat(clippingConfiguration.relativeToLiveWindow).isFalse();
    assertThat(clippingConfiguration.relativeToDefaultPosition).isFalse();
    assertThat(clippingConfiguration.startsAtKeyFrame).isFalse();
    assertThat(clippingConfiguration.allowUnseekableMedia).isFalse();
    assertThat(clippingConfiguration).isEqualTo(MediaItem.ClippingConfiguration.UNSET);
  }

@@ -503,6 +506,7 @@
        .setStartPositionMs(1000L)
        .setEndPositionUs(2000_031L)
        .setStartsAtKeyFrame(true)
        .setAllowUnseekableMedia(true)
        .build();

    MediaItem.ClippingConfiguration clippingConfigurationFromBundle =
@@ -30,12 +30,13 @@ public class PlayerTest {
  /**
   * This test picks a method on the {@link Player} interface that is known will never be
   * stabilised, and asserts that it is required to be implemented (therefore enforcing that {@link
-   * Player} is unstable-for-implementors). If this test fails because the {@link Player#next()}
-   * method is removed, it should be replaced with an equivalent unstable, unimplemented method.
   * Player} is unstable-for-implementors). If this test fails because the {@link
   * Player#getCurrentWindowIndex()} method is removed, it should be replaced with an equivalent
   * unstable, unimplemented method.
   */
  @Test
  public void testAtLeastOneUnstableUnimplementedMethodExists() throws Exception {
-    Method nextMethod = Player.class.getMethod("next");
-    assertThat(nextMethod.isDefault()).isFalse();
    Method getCurrentWindowIndexMethod = Player.class.getMethod("getCurrentWindowIndex");
    assertThat(getCurrentWindowIndexMethod.isDefault()).isFalse();
  }
}
@@ -172,38 +172,29 @@ public class SimpleBasePlayerTest {

  @Test
  public void stateBuildUpon_withExplicitTimelineAndNewCurrentIndex_reevalutesMediaMetadata() {
-    Timeline timeline =
-        new FakeTimeline(
-            new FakeTimeline.TimelineWindowDefinition(
-                /* periodCount= */ 1,
-                /* id= */ 0,
-                /* isSeekable= */ true,
-                /* isDynamic= */ true,
-                /* isLive= */ true,
-                /* isPlaceholder= */ false,
-                /* durationUs= */ 1000,
-                /* defaultPositionUs= */ 0,
-                /* windowOffsetInFirstPeriodUs= */ 0,
-                ImmutableList.of(AdPlaybackState.NONE),
    FakeTimeline.TimelineWindowDefinition timelineWindowDefinition0 =
        new FakeTimeline.TimelineWindowDefinition.Builder()
            .setDynamic(true)
            .setLive(true)
            .setDurationUs(1000L)
            .setWindowPositionInFirstPeriodUs(0L)
            .setMediaItem(
                new MediaItem.Builder()
                    .setMediaId("0")
                    .setMediaMetadata(new MediaMetadata.Builder().setArtist("artist0").build())
                    .build())
            .build();
    FakeTimeline.TimelineWindowDefinition timelineWindowDefinition1 =
        timelineWindowDefinition0
            .buildUpon()
            .setUid(1)
            .setMediaItem(
                new MediaItem.Builder()
                    .setMediaId("1")
                    .setMediaMetadata(new MediaMetadata.Builder().setArtist("artist1").build())
-                    .build()),
-            new FakeTimeline.TimelineWindowDefinition(
-                /* periodCount= */ 1,
-                /* id= */ 1,
-                /* isSeekable= */ true,
-                /* isDynamic= */ true,
-                /* isLive= */ true,
-                /* isPlaceholder= */ false,
-                /* durationUs= */ 1000,
-                /* defaultPositionUs= */ 0,
-                /* windowOffsetInFirstPeriodUs= */ 0,
-                ImmutableList.of(AdPlaybackState.NONE),
-                new MediaItem.Builder()
-                    .setMediaId("2")
-                    .setMediaMetadata(new MediaMetadata.Builder().setArtist("artist2").build())
-                    .build()));
                    .build())
            .build();
    Timeline timeline = new FakeTimeline(timelineWindowDefinition0, timelineWindowDefinition1);
    State state =
        new State.Builder()
            .setPlaylist(timeline, Tracks.EMPTY, /* currentMetadata= */ null)
@@ -213,7 +204,7 @@
    State newState = state.buildUpon().setCurrentMediaItemIndex(1).build();

    assertThat(newState.currentMetadata)
-        .isEqualTo(new MediaMetadata.Builder().setArtist("artist2").build());
        .isEqualTo(new MediaMetadata.Builder().setArtist("artist1").build());
  }

  @Test
@@ -430,25 +421,20 @@

  @Test
  public void
-      stateBuilderBuild_withUndefinedMediaMetadataAndExplicitTimeline_derivesMediaMetadataFromTracksAndMediaItem()
-          throws Exception {
      stateBuilderBuild_withUndefinedMediaMetadataAndExplicitTimeline_derivesMediaMetadataFromTracksAndMediaItem() {
    Timeline timeline =
        new FakeTimeline(
-            new FakeTimeline.TimelineWindowDefinition(
-                /* periodCount= */ 1,
-                /* id= */ 0,
-                /* isSeekable= */ true,
-                /* isDynamic= */ true,
-                /* isLive= */ true,
-                /* isPlaceholder= */ false,
-                /* durationUs= */ 1000,
-                /* defaultPositionUs= */ 0,
-                /* windowOffsetInFirstPeriodUs= */ 0,
-                ImmutableList.of(AdPlaybackState.NONE),
-                new MediaItem.Builder()
-                    .setMediaId("1")
-                    .setMediaMetadata(new MediaMetadata.Builder().setArtist("artist").build())
-                    .build()));
            new FakeTimeline.TimelineWindowDefinition.Builder()
                .setDynamic(true)
                .setLive(true)
                .setDurationUs(1000L)
                .setWindowPositionInFirstPeriodUs(0L)
                .setMediaItem(
                    new MediaItem.Builder()
                        .setMediaId("1")
                        .setMediaMetadata(new MediaMetadata.Builder().setArtist("artist").build())
                        .build())
                .build());
    Tracks tracks =
        new Tracks(
            ImmutableList.of(
@@ -7558,7 +7544,6 @@
    verifyNoMoreInteractions(listener);
  }

-  @SuppressWarnings("deprecation") // Testing deprecated listener call.
  @Test
  public void
      replaceMediaItems_asyncHandlingFromEmptyToEmpty_usesPlaceholderStateAndInformsListeners() {
@@ -41,13 +41,17 @@ public final class AudioMixingUtilTest {
      new AudioFormat(/* sampleRate= */ 44100, /* channelCount= */ 1, C.ENCODING_PCM_16BIT);

  private static final ChannelMixingMatrix STEREO_TO_STEREO =
-      ChannelMixingMatrix.create(/* inputChannelCount= */ 2, /* outputChannelCount= */ 2);
      ChannelMixingMatrix.createForConstantGain(
          /* inputChannelCount= */ 2, /* outputChannelCount= */ 2);
  private static final ChannelMixingMatrix MONO_TO_STEREO =
-      ChannelMixingMatrix.create(/* inputChannelCount= */ 1, /* outputChannelCount= */ 2);
      ChannelMixingMatrix.createForConstantGain(
          /* inputChannelCount= */ 1, /* outputChannelCount= */ 2);
  private static final ChannelMixingMatrix STEREO_TO_MONO =
-      ChannelMixingMatrix.create(/* inputChannelCount= */ 2, /* outputChannelCount= */ 1);
      ChannelMixingMatrix.createForConstantGain(
          /* inputChannelCount= */ 2, /* outputChannelCount= */ 1);
  private static final ChannelMixingMatrix MONO_TO_MONO =
-      ChannelMixingMatrix.create(/* inputChannelCount= */ 1, /* outputChannelCount= */ 1);
      ChannelMixingMatrix.createForConstantGain(
          /* inputChannelCount= */ 1, /* outputChannelCount= */ 1);

  @Test
  public void mixToStereoFloat_withStereoFloatInput() {
@@ -41,9 +41,11 @@ public final class ChannelMixingAudioProcessorTest {
  public void setUp() {
    audioProcessor = new ChannelMixingAudioProcessor();
    audioProcessor.putChannelMixingMatrix(
-        ChannelMixingMatrix.create(/* inputChannelCount= */ 2, /* outputChannelCount= */ 1));
        ChannelMixingMatrix.createForConstantGain(
            /* inputChannelCount= */ 2, /* outputChannelCount= */ 1));
    audioProcessor.putChannelMixingMatrix(
-        ChannelMixingMatrix.create(/* inputChannelCount= */ 1, /* outputChannelCount= */ 2));
        ChannelMixingMatrix.createForConstantGain(
            /* inputChannelCount= */ 1, /* outputChannelCount= */ 2));
  }

  @Test
@@ -102,7 +104,8 @@
  @Test
  public void configureWithIdentityMatrix_isActiveReturnsFalse() throws Exception {
    audioProcessor.putChannelMixingMatrix(
-        ChannelMixingMatrix.create(/* inputChannelCount= */ 2, /* outputChannelCount= */ 2));
        ChannelMixingMatrix.createForConstantGain(
            /* inputChannelCount= */ 2, /* outputChannelCount= */ 2));

    audioProcessor.configure(AUDIO_FORMAT_48KHZ_STEREO_16BIT);
    assertThat(audioProcessor.isActive()).isFalse();
@@ -133,7 +136,8 @@
  @Test
  public void scaledMixingMatrix_queueInput_outputIsScaled() throws Exception {
    audioProcessor.putChannelMixingMatrix(
-        ChannelMixingMatrix.create(/* inputChannelCount= */ 2, /* outputChannelCount= */ 2)
        ChannelMixingMatrix.createForConstantGain(
            /* inputChannelCount= */ 2, /* outputChannelCount= */ 2)
            .scaleBy(0.5f));

    audioProcessor.configure(AUDIO_FORMAT_48KHZ_STEREO_16BIT);
@@ -0,0 +1,316 @@
/*
 * Copyright 2025 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.common.audio;

import static androidx.media3.common.audio.DefaultGainProvider.FADE_IN_EQUAL_POWER;
import static androidx.media3.common.audio.DefaultGainProvider.FADE_IN_LINEAR;
import static androidx.media3.common.audio.DefaultGainProvider.FADE_OUT_EQUAL_POWER;
import static androidx.media3.common.audio.DefaultGainProvider.FADE_OUT_LINEAR;
import static com.google.common.truth.Truth.assertThat;

import androidx.media3.common.C;
import androidx.media3.common.audio.DefaultGainProvider.FadeProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;

/** Unit test for {@link DefaultGainProvider}. */
@RunWith(AndroidJUnit4.class)
public class DefaultGainProviderTest {

  private static final int SAMPLE_RATE = 50000;

  private static final FadeProvider CONSTANT_VALUE_FADE = (index, duration) -> 0.5f;

  @Test
  public void getGainFactorAtSamplePosition_withoutFades_returnsDefaultValue() {
    DefaultGainProvider provider = new DefaultGainProvider.Builder(/* defaultGain= */ 1f).build();
    assertThat(provider.getGainFactorAtSamplePosition(0, SAMPLE_RATE)).isEqualTo(1f);
  }

  @Test
  public void getGainFactorAtSamplePosition_withConstantFade_returnsFadeValue() {
    DefaultGainProvider provider =
        new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
            .addFadeAt(
                /* positionUs= */ 0L, /* durationUs= */ C.MICROS_PER_SECOND, CONSTANT_VALUE_FADE)
            .build();
    assertThat(provider.getGainFactorAtSamplePosition(/* samplePosition= */ 0, SAMPLE_RATE))
        .isEqualTo(0.5f);
  }

  @Test
  public void getGainFactorAtSamplePosition_withFadeIn_returnsFadeValue() {
    DefaultGainProvider provider =
        new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
            .addFadeAt(/* positionUs= */ 0L, /* durationUs= */ C.MICROS_PER_SECOND, FADE_IN_LINEAR)
            .build();
    assertThat(provider.getGainFactorAtSamplePosition(/* samplePosition= */ 0, SAMPLE_RATE))
        .isEqualTo(0f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ SAMPLE_RATE / 4, SAMPLE_RATE))
        .isEqualTo(0.25f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ SAMPLE_RATE / 2, SAMPLE_RATE))
        .isEqualTo(0.5f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 3 * SAMPLE_RATE / 4, SAMPLE_RATE))
        .isEqualTo(0.75f);
    assertThat(
            provider.getGainFactorAtSamplePosition(/* samplePosition= */ SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(1f);
  }

  @Test
  public void getGainFactorAtSamplePosition_withNonTrivialFadeDuration_scalesFade() {
    DefaultGainProvider provider =
        new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
            .addFadeAt(
                /* positionUs= */ 0L, /* durationUs= */ 4 * C.MICROS_PER_SECOND, FADE_IN_LINEAR)
            .build();
    assertThat(provider.getGainFactorAtSamplePosition(/* samplePosition= */ 0, SAMPLE_RATE))
        .isEqualTo(0f);
    assertThat(
            provider.getGainFactorAtSamplePosition(/* samplePosition= */ SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.25f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 2 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.5f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 3 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.75f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 4 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(1f);
  }

  @Test
  public void getGainFactorAtSamplePosition_withSubsequentSampleRateChange_rescalesFades() {
    DefaultGainProvider provider =
        new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
            .addFadeAt(/* positionUs= */ 0L, /* durationUs= */ C.MICROS_PER_SECOND, FADE_IN_LINEAR)
            .build();

    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 0, /* sampleRate= */ SAMPLE_RATE))
        .isEqualTo(0f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ SAMPLE_RATE / 2, /* sampleRate= */ SAMPLE_RATE))
        .isEqualTo(0.5f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ SAMPLE_RATE, /* sampleRate= */ SAMPLE_RATE))
        .isEqualTo(1f);

    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 0, /* sampleRate= */ 2 * SAMPLE_RATE))
        .isEqualTo(0f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ SAMPLE_RATE, /* sampleRate= */ 2 * SAMPLE_RATE))
        .isEqualTo(0.5f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 2 * SAMPLE_RATE, /* sampleRate= */ 2 * SAMPLE_RATE))
        .isEqualTo(1f);
  }

  @Test
  public void getGainFactorAtSamplePosition_afterAddFadeAt_appliesFadeInCorrectly() {
    DefaultGainProvider provider =
        new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
            .addFadeAt(
                5 * C.MICROS_PER_SECOND, /* durationUs= */ 2 * C.MICROS_PER_SECOND, FADE_IN_LINEAR)
            .build();

    assertThat(provider.getGainFactorAtSamplePosition(/* samplePosition= */ 0, SAMPLE_RATE))
        .isEqualTo(1f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 3 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(1f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 5 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 6 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.5f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 7 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(1f);
  }

  @Test
  public void getGainFactorAtSamplePosition_afterAddFadeAt_appliesFadeOutCorrectly() {
    DefaultGainProvider provider =
        new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
            .addFadeAt(
                /* positionUs= */ 5 * C.MICROS_PER_SECOND,
                /* durationUs= */ 4 * C.MICROS_PER_SECOND,
                FADE_OUT_LINEAR)
            .build();

    assertThat(provider.getGainFactorAtSamplePosition(/* samplePosition= */ 0, SAMPLE_RATE))
        .isEqualTo(1f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 3 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(1f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 5 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(1f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 6 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.75f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 7 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.5f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 8 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.25f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 9 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(1f);
  }

  @Test
  public void getGainFactorAtSamplePosition_superposedFades_keepsLastAddedFadeOnTop() {
    DefaultGainProvider provider =
        new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
            .addFadeAt(
                /* positionUs= */ 5 * C.MICROS_PER_SECOND,
                /* durationUs= */ 5 * C.MICROS_PER_SECOND,
                FADE_IN_LINEAR)
            .addFadeAt(
                /* positionUs= */ 7 * C.MICROS_PER_SECOND,
                /* durationUs= */ C.MICROS_PER_SECOND,
                CONSTANT_VALUE_FADE)
            .build();

    assertThat(provider.getGainFactorAtSamplePosition(/* samplePosition= */ 0, SAMPLE_RATE))
        .isEqualTo(1f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 5 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 6 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.2f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 7 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.5f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ (long) (7.5 * SAMPLE_RATE), SAMPLE_RATE))
        .isEqualTo(0.5f);
    assertThat(
            provider.getGainFactorAtSamplePosition(
                /* samplePosition= */ 8 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(0.6f);
  }

  @Test
  public void linearFades_maintainEqualGain() {
    int duration = 100;
    for (int i = 0; i <= duration; i++) {
      float inGain = FADE_IN_LINEAR.getGainFactorAt(/* index= */ i, /* duration= */ duration);
      float outGain = FADE_OUT_LINEAR.getGainFactorAt(/* index= */ i, /* duration= */ duration);
      assertThat(inGain + outGain).isWithin(Math.ulp(1.0f)).of(1f);
    }
  }

  @Test
  public void constantPowerFades_maintainEqualPower() {
    int duration = 100;
    for (int i = 0; i <= duration; i++) {
      float inGain = FADE_IN_EQUAL_POWER.getGainFactorAt(/* index= */ i, /* duration= */ 10);
      float outGain = FADE_OUT_EQUAL_POWER.getGainFactorAt(/* index= */ i, /* duration= */ 10);
      assertThat(inGain * inGain + outGain * outGain).isWithin(Math.ulp(1.0f)).of(1.0f);
    }
  }
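The two tests above pin down the complementary-fade invariants directly asserted in the loops. Stated in math form, for every index i of a fade of duration d:

% Equal-gain (linear pair) and equal-power (constant-power pair) invariants:
g^{\mathrm{lin}}_{\mathrm{in}}(i) + g^{\mathrm{lin}}_{\mathrm{out}}(i) = 1,
\qquad
\bigl(g^{\mathrm{pow}}_{\mathrm{in}}(i)\bigr)^2 + \bigl(g^{\mathrm{pow}}_{\mathrm{out}}(i)\bigr)^2 = 1,
\qquad 0 \le i \le d.

The first keeps a cross-fade at constant amplitude; the second keeps it at constant power, which is why the test squares the gains before summing.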
  @Test
  public void isUnityUntil_withDefaultValueSetToUnity_returnsTimeEndOfStream() {
    DefaultGainProvider provider = new DefaultGainProvider.Builder(/* defaultGain= */ 1f).build();
    assertThat(provider.isUnityUntil(/* samplePosition= */ 0, SAMPLE_RATE))
        .isEqualTo(C.TIME_END_OF_SOURCE);
  }

  @Test
  public void isUnityUntil_withDefaultValueSetToZero_returnsTimeUnset() {
    DefaultGainProvider provider = new DefaultGainProvider.Builder(/* defaultGain= */ 0f).build();
    assertThat(provider.isUnityUntil(/* samplePosition= */ 0, SAMPLE_RATE)).isEqualTo(C.TIME_UNSET);
  }

  @Test
  public void isUnityUntil_withMultipleNonUnityRegions_resolvesResultingUnityRegions() {
    DefaultGainProvider provider =
        new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
            .addFadeAt(
                /* positionUs= */ C.MICROS_PER_SECOND,
                /* durationUs= */ C.MICROS_PER_SECOND,
                CONSTANT_VALUE_FADE)
            .addFadeAt(
                /* positionUs= */ 3 * C.MICROS_PER_SECOND,
                /* durationUs= */ C.MICROS_PER_SECOND,
                CONSTANT_VALUE_FADE)
            .build();
    assertThat(provider.isUnityUntil(/* samplePosition= */ 0, SAMPLE_RATE)).isEqualTo(SAMPLE_RATE);
    assertThat(provider.isUnityUntil(/* samplePosition= */ SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(C.TIME_UNSET);
    assertThat(provider.isUnityUntil(/* samplePosition= */ 2 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(3 * SAMPLE_RATE);
    assertThat(provider.isUnityUntil(/* samplePosition= */ 3 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(C.TIME_UNSET);
    assertThat(provider.isUnityUntil(/* samplePosition= */ 4 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(C.TIME_END_OF_SOURCE);
  }

  @Test
  public void isUnityUntil_withNonUnityRegionStartingAtUnity_doesNotSkipNonUnityRegion() {
    DefaultGainProvider provider =
        new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
            .addFadeAt(
                /* positionUs= */ C.MICROS_PER_SECOND,
                /* durationUs= */ C.MICROS_PER_SECOND,
                FADE_OUT_LINEAR)
            .build();
    assertThat(provider.isUnityUntil(/* samplePosition= */ 0, SAMPLE_RATE)).isEqualTo(SAMPLE_RATE);
    assertThat(provider.isUnityUntil(/* samplePosition= */ SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(C.TIME_UNSET);
    assertThat(provider.isUnityUntil(/* samplePosition= */ 2 * SAMPLE_RATE, SAMPLE_RATE))
        .isEqualTo(C.TIME_END_OF_SOURCE);
  }
}
@ -0,0 +1,257 @@
|
||||
/*
|
||||
* Copyright 2025 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package androidx.media3.common.audio;
|
||||
|
||||
import static androidx.media3.common.audio.AudioProcessor.EMPTY_BUFFER;
|
||||
import static androidx.media3.test.utils.TestUtil.createByteBuffer;
|
||||
import static androidx.media3.test.utils.TestUtil.createFloatArray;
|
||||
import static androidx.media3.test.utils.TestUtil.createShortArray;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static org.junit.Assert.assertThrows;
|
||||
|
||||
import androidx.media3.common.C;
|
||||
import androidx.media3.common.audio.AudioProcessor.AudioFormat;
|
||||
import androidx.media3.common.audio.AudioProcessor.UnhandledAudioFormatException;
|
||||
import androidx.media3.common.audio.DefaultGainProvider.FadeProvider;
|
||||
import androidx.test.ext.junit.runners.AndroidJUnit4;
|
||||
import java.nio.ByteBuffer;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
/** Unit test for {@link GainProcessor}. */
|
||||
@RunWith(AndroidJUnit4.class)
|
||||
public class GainProcessorTest {
|
||||
|
||||
private static final FadeProvider CONSTANT_VALUE_FADE = (index, duration) -> 0.5f;
|
||||
|
||||
private static final DefaultGainProvider HUNDRED_US_FADE_IN_PROVIDER =
|
||||
new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
|
||||
.addFadeAt(
|
||||
/* positionUs= */ 0L, /* durationUs= */ 100, DefaultGainProvider.FADE_IN_LINEAR)
|
||||
.build();
|
||||
|
||||
private static final AudioFormat MONO_50KHZ_16BIT_FORMAT =
|
||||
      new AudioFormat(/* sampleRate= */ 50000, /* channelCount= */ 1, C.ENCODING_PCM_16BIT);

  private static final AudioFormat MONO_100KHZ_16BIT_FORMAT =
      new AudioFormat(/* sampleRate= */ 100000, /* channelCount= */ 1, C.ENCODING_PCM_16BIT);

  private static final AudioFormat MONO_50KHZ_FLOAT_FORMAT =
      new AudioFormat(/* sampleRate= */ 50000, /* channelCount= */ 1, C.ENCODING_PCM_FLOAT);

  @Test
  public void applyGain_withMutingGainProvider_returnsAllZeroes()
      throws UnhandledAudioFormatException {
    GainProcessor processor =
        new GainProcessor(new DefaultGainProvider.Builder(/* defaultGain= */ 0f).build());
    processor.configure(MONO_50KHZ_16BIT_FORMAT);
    processor.flush();

    ByteBuffer input = createByteBuffer(new short[] {1, 1, 1, 1, 1, 1, 1, 1, 1, 1});
    processor.queueInput(input);

    ByteBuffer output = processor.getOutput();
    // 10 muted 16-bit samples produce 20 bytes of output.
    assertThat(output.remaining()).isEqualTo(20);
    while (output.hasRemaining()) {
      assertThat(output.getShort()).isEqualTo(0);
    }
  }

  @Test
  public void applyGain_withFadeIn_returnsScaledSamples() throws UnhandledAudioFormatException {
    GainProcessor processor = new GainProcessor(HUNDRED_US_FADE_IN_PROVIDER);
    processor.configure(MONO_50KHZ_16BIT_FORMAT);
    processor.flush();

    ByteBuffer input = createByteBuffer(new short[] {100, 100, 100, 100, 100, 100, 100});
    processor.queueInput(input);
    ByteBuffer output = processor.getOutput();

    short[] outputSamples = createShortArray(output);
    // A 100us fade at 50kHz spans the first 5 samples.
    assertThat(outputSamples).isEqualTo(new short[] {0, 20, 40, 60, 80, 100, 100});
  }

  @Test
  public void applyGain_withFloatSamples_returnsScaledSamples()
      throws UnhandledAudioFormatException {
    GainProcessor processor = new GainProcessor(HUNDRED_US_FADE_IN_PROVIDER);
    processor.configure(MONO_50KHZ_FLOAT_FORMAT);
    processor.flush();

    ByteBuffer input = createByteBuffer(new float[] {1, 1, 1, 1, 1, 1, 1});
    processor.queueInput(input);
    ByteBuffer output = processor.getOutput();

    float[] outputSamples = createFloatArray(output);
    assertThat(outputSamples).isEqualTo(new float[] {0f, 0.2f, 0.4f, 0.6f, 0.8f, 1f, 1f});
  }

  @Test
  public void applyGain_afterSampleRateChange_stretchesFade() throws UnhandledAudioFormatException {
    GainProcessor processor = new GainProcessor(HUNDRED_US_FADE_IN_PROVIDER);
    processor.configure(MONO_50KHZ_16BIT_FORMAT);
    processor.flush();

    ByteBuffer input = createByteBuffer(new short[] {100, 100, 100, 100, 100, 100, 100});
    processor.queueInput(input);
    ByteBuffer output = processor.getOutput();

    short[] outputSamples = createShortArray(output);
    assertThat(outputSamples).isEqualTo(new short[] {0, 20, 40, 60, 80, 100, 100});

    processor.configure(MONO_100KHZ_16BIT_FORMAT);
    processor.flush();
    input.rewind();
    processor.queueInput(input);
    output.clear();
    output = processor.getOutput();

    outputSamples = createShortArray(output);
    // At 100kHz the same 100us fade spans 10 samples, so the ramp rises half as fast.
    assertThat(outputSamples).isEqualTo(new short[] {0, 10, 20, 30, 40, 50, 60});
  }

  @Test
  public void applyGain_withMultipleQueueInputCalls_appliesGainAtCorrectPosition()
      throws UnhandledAudioFormatException {
    GainProcessor processor =
        new GainProcessor(
            new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
                .addFadeAt(/* positionUs= */ 100, /* durationUs= */ 100, CONSTANT_VALUE_FADE)
                .build());
    processor.configure(MONO_50KHZ_16BIT_FORMAT);
    processor.flush();

    // Each 5-frame buffer covers 100us at 50kHz: before, inside, and after the fade.
    ByteBuffer input = createByteBuffer(new short[] {100, 100, 100, 100, 100});
    processor.queueInput(input);
    ByteBuffer output = processor.getOutput();

    short[] outputSamples = createShortArray(output);
    assertThat(outputSamples).isEqualTo(new short[] {100, 100, 100, 100, 100});

    input.rewind();
    processor.queueInput(input);
    output.clear();
    output = processor.getOutput();

    outputSamples = createShortArray(output);
    assertThat(outputSamples).isEqualTo(new short[] {50, 50, 50, 50, 50});

    input.rewind();
    processor.queueInput(input);
    output.clear();
    output = processor.getOutput();

    outputSamples = createShortArray(output);
    assertThat(outputSamples).isEqualTo(new short[] {100, 100, 100, 100, 100});
  }
  @Test
  public void applyGain_withSingleQueueInputCall_appliesGainAtCorrectPosition()
      throws UnhandledAudioFormatException {
    GainProcessor processor =
        new GainProcessor(
            new DefaultGainProvider.Builder(/* defaultGain= */ 1f)
                .addFadeAt(/* positionUs= */ 100, /* durationUs= */ 100, CONSTANT_VALUE_FADE)
                .build());
    processor.configure(MONO_50KHZ_16BIT_FORMAT);
    processor.flush();

    // 15 mono frames set to 100.
    ByteBuffer input =
        createByteBuffer(
            new short[] {
              100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100
            });
    processor.queueInput(input);
    ByteBuffer output = processor.getOutput();

    short[] outputSamples = createShortArray(output);
    // 5 frames at unity + 5 frames with gain 0.5 (100 * 0.5 = 50) + 5 frames at unity.
    assertThat(outputSamples)
        .isEqualTo(
            new short[] {100, 100, 100, 100, 100, 50, 50, 50, 50, 50, 100, 100, 100, 100, 100});
  }
  @Test
  public void isEnded_afterQueueEndOfStreamWithNoPendingOutput_returnsTrue()
      throws UnhandledAudioFormatException {
    GainProcessor processor = new GainProcessor(HUNDRED_US_FADE_IN_PROVIDER);
    processor.configure(MONO_50KHZ_16BIT_FORMAT);
    processor.flush();

    ByteBuffer input = createByteBuffer(new short[] {100, 100, 100, 100, 100, 100, 100});
    processor.queueInput(input);
    processor.queueEndOfStream();

    assertThat(processor.isEnded()).isFalse();
    processor.getOutput();
    assertThat(processor.isEnded()).isTrue();
  }

  @Test
  public void queueInput_beforeConfigureAndFlush_throwsIllegalStateException()
      throws UnhandledAudioFormatException {
    GainProcessor processor = new GainProcessor(HUNDRED_US_FADE_IN_PROVIDER);

    assertThrows(IllegalStateException.class, () -> processor.queueInput(EMPTY_BUFFER));
    processor.configure(MONO_50KHZ_16BIT_FORMAT);
    assertThrows(IllegalStateException.class, () -> processor.queueInput(EMPTY_BUFFER));
  }

  @Test
  public void configure_withUnsupportedEncoding_throwsUnhandledAudioFormatException() {
    GainProcessor processor = new GainProcessor(HUNDRED_US_FADE_IN_PROVIDER);
    assertThrows(
        UnhandledAudioFormatException.class,
        () ->
            processor.configure(
                new AudioFormat(
                    /* sampleRate= */ 50000,
                    /* channelCount= */ 1,
                    C.ENCODING_PCM_16BIT_BIG_ENDIAN)));
    assertThrows(
        UnhandledAudioFormatException.class,
        () ->
            processor.configure(
                new AudioFormat(
                    /* sampleRate= */ 50000,
                    /* channelCount= */ 1,
                    C.ENCODING_PCM_24BIT_BIG_ENDIAN)));
    assertThrows(
        UnhandledAudioFormatException.class,
        () ->
            processor.configure(
                new AudioFormat(
                    /* sampleRate= */ 50000,
                    /* channelCount= */ 1,
                    C.ENCODING_PCM_32BIT_BIG_ENDIAN)));
    assertThrows(
        UnhandledAudioFormatException.class,
        () ->
            processor.configure(
                new AudioFormat(
                    /* sampleRate= */ 50000, /* channelCount= */ 1, C.ENCODING_INVALID)));
  }

  @Test
  public void isActive_withConstantGainProviderAtUnity_returnsFalse()
      throws UnhandledAudioFormatException {
    GainProcessor processor =
        new GainProcessor(new DefaultGainProvider.Builder(/* defaultGain= */ 1).build());
    processor.configure(MONO_50KHZ_FLOAT_FORMAT);
    processor.flush();
    assertThat(processor.isActive()).isFalse();
  }
}
@@ -16,7 +16,7 @@
 package androidx.media3.common.audio;

 import static androidx.media3.common.audio.AudioProcessor.EMPTY_BUFFER;
-import static androidx.media3.common.util.Assertions.checkState;
+import static androidx.media3.common.audio.SpeedChangingAudioProcessor.getInputFrameCountForOutput;
 import static androidx.media3.test.utils.TestUtil.getNonRandomByteBuffer;
 import static com.google.common.truth.Truth.assertThat;
 import static org.junit.Assert.assertThrows;
@@ -36,53 +36,59 @@ import org.junit.runner.RunWith;
 @RunWith(AndroidJUnit4.class)
 public class SpeedChangingAudioProcessorTest {

-  private static final AudioFormat AUDIO_FORMAT =
+  private static final AudioFormat AUDIO_FORMAT_44_100HZ =
       new AudioFormat(
-          /* sampleRate= */ 44100, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);
+          /* sampleRate= */ 44_100, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);
+
+  private static final AudioFormat AUDIO_FORMAT_50_000HZ =
+      new AudioFormat(
+          /* sampleRate= */ 50_000, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);

   @Test
   public void queueInput_noSpeedChange_doesNotOverwriteInput() throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);

     inputBuffer.rewind();
     assertThat(inputBuffer)
-        .isEqualTo(getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame));
+        .isEqualTo(
+            getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame));
   }

   @Test
   public void queueInput_speedChange_doesNotOverwriteInput() throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);

     inputBuffer.rewind();
     assertThat(inputBuffer)
-        .isEqualTo(getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame));
+        .isEqualTo(
+            getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame));
   }

   @Test
   public void queueInput_noSpeedChange_copiesSamples() throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     speedChangingAudioProcessor.queueEndOfStream();
@@ -96,11 +102,11 @@ public class SpeedChangingAudioProcessorTest {
   public void queueInput_speedChange_modifiesSamples() throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     speedChangingAudioProcessor.queueEndOfStream();
@@ -115,11 +121,13 @@ public class SpeedChangingAudioProcessorTest {
   public void queueInput_noSpeedChangeAfterSpeedChange_copiesSamples() throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {5, 5},
+            /* speeds= */ new float[] {2, 1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     inputBuffer.rewind();
@@ -136,11 +144,13 @@ public class SpeedChangingAudioProcessorTest {
       throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {1, 2});
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {5, 5},
+            /* speeds= */ new float[] {1, 2});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     inputBuffer.rewind();
@@ -150,7 +160,7 @@ public class SpeedChangingAudioProcessorTest {

     speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
     speedChangingAudioProcessor = getConfiguredSpeedChangingAudioProcessor(speedProvider);
     inputBuffer.rewind();
     speedChangingAudioProcessor.queueInput(inputBuffer);
@@ -165,11 +175,13 @@ public class SpeedChangingAudioProcessorTest {
       throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {3, 2});
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {5, 5},
+            /* speeds= */ new float[] {3, 2});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     inputBuffer.rewind();
@@ -179,7 +191,7 @@ public class SpeedChangingAudioProcessorTest {

     speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
     speedChangingAudioProcessor = getConfiguredSpeedChangingAudioProcessor(speedProvider);
     inputBuffer.rewind();
     speedChangingAudioProcessor.queueInput(inputBuffer);
@@ -194,18 +206,20 @@ public class SpeedChangingAudioProcessorTest {
       throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 3});
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {5, 5},
+            /* speeds= */ new float[] {2, 3});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     ByteBuffer outputBuffer = getAudioProcessorOutput(speedChangingAudioProcessor);

     speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
     speedChangingAudioProcessor = getConfiguredSpeedChangingAudioProcessor(speedProvider);
     inputBuffer.rewind();
     speedChangingAudioProcessor.queueInput(inputBuffer);
@@ -218,7 +232,7 @@ public class SpeedChangingAudioProcessorTest {
   @Test
   public void queueInput_multipleSpeedsInBufferWithLimitAtFrameBoundary_readsDataUntilSpeedLimit()
       throws Exception {
-    long speedChangeTimeUs = 4 * C.MICROS_PER_SECOND / AUDIO_FORMAT.sampleRate;
+    long speedChangeTimeUs = 4 * C.MICROS_PER_SECOND / AUDIO_FORMAT_44_100HZ.sampleRate;
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithStartTimes(
             /* startTimesUs= */ new long[] {0L, speedChangeTimeUs},
@@ -226,19 +240,19 @@ public class SpeedChangingAudioProcessorTest {
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
     int inputBufferLimit = inputBuffer.limit();

     speedChangingAudioProcessor.queueInput(inputBuffer);

-    assertThat(inputBuffer.position()).isEqualTo(4 * AUDIO_FORMAT.bytesPerFrame);
+    assertThat(inputBuffer.position()).isEqualTo(4 * AUDIO_FORMAT_44_100HZ.bytesPerFrame);
     assertThat(inputBuffer.limit()).isEqualTo(inputBufferLimit);
   }

   @Test
   public void queueInput_multipleSpeedsInBufferWithLimitInsideFrame_readsDataUntilSpeedLimit()
       throws Exception {
-    long speedChangeTimeUs = (long) (3.5 * C.MICROS_PER_SECOND / AUDIO_FORMAT.sampleRate);
+    long speedChangeTimeUs = (long) (3.5 * C.MICROS_PER_SECOND / AUDIO_FORMAT_44_100HZ.sampleRate);
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithStartTimes(
             /* startTimesUs= */ new long[] {0L, speedChangeTimeUs},
@@ -246,12 +260,12 @@ public class SpeedChangingAudioProcessorTest {
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
     int inputBufferLimit = inputBuffer.limit();

     speedChangingAudioProcessor.queueInput(inputBuffer);

-    assertThat(inputBuffer.position()).isEqualTo(4 * AUDIO_FORMAT.bytesPerFrame);
+    assertThat(inputBuffer.position()).isEqualTo(4 * AUDIO_FORMAT_44_100HZ.bytesPerFrame);
     assertThat(inputBuffer.limit()).isEqualTo(inputBufferLimit);
   }

@@ -266,18 +280,18 @@ public class SpeedChangingAudioProcessorTest {
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     // SpeedChangingAudioProcessor only queues samples until the next speed change.
     while (inputBuffer.hasRemaining()) {
       speedChangingAudioProcessor.queueInput(inputBuffer);
       outputFrames +=
-          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     }

     speedChangingAudioProcessor.queueEndOfStream();
     outputFrames +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     // We allow 1 sample of tolerance per speed change.
     assertThat(outputFrames).isWithin(1).of(3);
   }
@@ -287,11 +301,13 @@ public class SpeedChangingAudioProcessorTest {
       throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {5, 5},
+            /* speeds= */ new float[] {2, 1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     inputBuffer.rewind();
@@ -307,11 +323,13 @@ public class SpeedChangingAudioProcessorTest {
       throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {1, 2});
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {5, 5},
+            /* speeds= */ new float[] {1, 2});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     inputBuffer.rewind();
@@ -327,11 +345,11 @@ public class SpeedChangingAudioProcessorTest {
       throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     speedChangingAudioProcessor.queueEndOfStream();
@@ -344,11 +362,11 @@ public class SpeedChangingAudioProcessorTest {
       throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     speedChangingAudioProcessor.queueEndOfStream();
@@ -360,7 +378,7 @@ public class SpeedChangingAudioProcessorTest {
   public void queueEndOfStream_noInputQueued_endsProcessor() throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);

@@ -373,11 +391,11 @@ public class SpeedChangingAudioProcessorTest {
   public void isEnded_afterNoSpeedChangeAndOutputRetrieved_isFalse() throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     getAudioProcessorOutput(speedChangingAudioProcessor);
@@ -389,11 +407,11 @@ public class SpeedChangingAudioProcessorTest {
   public void isEnded_afterSpeedChangeAndOutputRetrieved_isFalse() throws Exception {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
+            AUDIO_FORMAT_44_100HZ, /* frameCounts= */ new int[] {5}, /* speeds= */ new float[] {2});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(inputBuffer);
     getAudioProcessorOutput(speedChangingAudioProcessor);
@@ -402,147 +420,89 @@ public class SpeedChangingAudioProcessorTest {
   }

   @Test
-  public void getSpeedAdjustedTimeAsync_callbacksCalledWithCorrectParameters() throws Exception {
+  public void getSpeedAdjustedTimeAsync_beforeFlush_callbacksCalledWithCorrectParametersAfterFlush()
+      throws Exception {
     ArrayList<Long> outputTimesUs = new ArrayList<>();
-    // The speed change is at 113Us (5*MICROS_PER_SECOND/sampleRate).
+    // Sample period = 20us.
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
+            AUDIO_FORMAT_50_000HZ,
+            /* frameCounts= */ new int[] {6, 6},
+            /* speeds= */ new float[] {2, 1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
-        getConfiguredSpeedChangingAudioProcessor(speedProvider);
-    ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
+        new SpeedChangingAudioProcessor(speedProvider);
+    speedChangingAudioProcessor.configure(AUDIO_FORMAT_50_000HZ);

     speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
-        /* inputTimeUs= */ 50L, outputTimesUs::add);
-    speedChangingAudioProcessor.queueInput(inputBuffer);
-    getAudioProcessorOutput(speedChangingAudioProcessor);
-    inputBuffer.rewind();
-    speedChangingAudioProcessor.queueInput(inputBuffer);
-    getAudioProcessorOutput(speedChangingAudioProcessor);
+        /* inputTimeUs= */ 40L, outputTimesUs::add);
     speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
-        /* inputTimeUs= */ 100L, outputTimesUs::add);
+        /* inputTimeUs= */ 80L, outputTimesUs::add);
     speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
-        /* inputTimeUs= */ 150L, outputTimesUs::add);
+        /* inputTimeUs= */ 160L, outputTimesUs::add);

-    // 150 is after the speed change so floor(113 / 2 + (150 - 113)*1) -> 93
-    assertThat(outputTimesUs).containsExactly(25L, 50L, 93L);
+    assertThat(outputTimesUs).isEmpty();
+    speedChangingAudioProcessor.flush();
+    assertThat(outputTimesUs).containsExactly(20L, 40L, 100L);
   }

   @Test
-  public void getSpeedAdjustedTimeAsync_afterFlush_callbacksCalledWithCorrectParameters()
+  public void getSpeedAdjustedTimeAsync_afterCallToFlush_callbacksCalledWithCorrectParameters()
       throws Exception {
     ArrayList<Long> outputTimesUs = new ArrayList<>();
-    // The speed change is at 113Us (5*MICROS_PER_SECOND/sampleRate). Also add another speed change
-    // to 3x at a later point that should not be used if the flush is handled correctly.
+    // Sample period = 20us.
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
-            /* frameCounts= */ new int[] {5, 5, 5},
-            /* speeds= */ new float[] {2, 1, 3});
+            AUDIO_FORMAT_50_000HZ,
+            /* frameCounts= */ new int[] {6, 6},
+            /* speeds= */ new float[] {2, 1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
-        getConfiguredSpeedChangingAudioProcessor(speedProvider);
-    ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
-    // Use the audio processor before a flush
-    speedChangingAudioProcessor.queueInput(inputBuffer);
-    getAudioProcessorOutput(speedChangingAudioProcessor);
-    inputBuffer.rewind();
-    speedChangingAudioProcessor.queueInput(inputBuffer);
-    getAudioProcessorOutput(speedChangingAudioProcessor);
-    inputBuffer.rewind();
-
-    // Flush and use it again.
+        new SpeedChangingAudioProcessor(speedProvider);
+    speedChangingAudioProcessor.configure(AUDIO_FORMAT_50_000HZ);
     speedChangingAudioProcessor.flush();
     speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
-        /* inputTimeUs= */ 50L, outputTimesUs::add);
-    speedChangingAudioProcessor.queueInput(inputBuffer);
-    getAudioProcessorOutput(speedChangingAudioProcessor);
-    inputBuffer.rewind();
-    speedChangingAudioProcessor.queueInput(inputBuffer);
-    getAudioProcessorOutput(speedChangingAudioProcessor);
-    speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
-        /* inputTimeUs= */ 100L, outputTimesUs::add);
-    speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
-        /* inputTimeUs= */ 150L, outputTimesUs::add);
-
-    // 150 is after the speed change so floor(113 / 2 + (150 - 113)*1) -> 93
-    assertThat(outputTimesUs).containsExactly(25L, 50L, 93L);
+        /* inputTimeUs= */ 40L, outputTimesUs::add);
+    speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
+        /* inputTimeUs= */ 80L, outputTimesUs::add);
+    speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
+        /* inputTimeUs= */ 160L, outputTimesUs::add);
+
+    assertThat(outputTimesUs).containsExactly(20L, 40L, 100L);
   }

   @Test
   public void getSpeedAdjustedTimeAsync_timeAfterEndTime_callbacksCalledWithCorrectParameters()
       throws Exception {
     ArrayList<Long> outputTimesUs = new ArrayList<>();
-    // The speed change is at 113Us (5*MICROS_PER_SECOND/sampleRate).
+    // The speed change is at 120Us (6*MICROS_PER_SECOND/sampleRate).
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
+            AUDIO_FORMAT_50_000HZ,
+            /* frameCounts= */ new int[] {6, 6},
+            /* speeds= */ new float[] {2, 1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     ByteBuffer inputBuffer =
         getNonRandomByteBuffer(/* frameCount= */ 3, AUDIO_FORMAT.bytesPerFrame);

     speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
         /* inputTimeUs= */ 300L, outputTimesUs::add);
     speedChangingAudioProcessor.queueInput(inputBuffer);
     getAudioProcessorOutput(speedChangingAudioProcessor);
     inputBuffer.rewind();
     speedChangingAudioProcessor.queueInput(inputBuffer);
     getAudioProcessorOutput(speedChangingAudioProcessor);
     inputBuffer.rewind();
     speedChangingAudioProcessor.queueInput(inputBuffer);
     speedChangingAudioProcessor.queueEndOfStream();
     getAudioProcessorOutput(speedChangingAudioProcessor);

     // 150 is after the speed change so floor(113 / 2 + (300 - 113)*1) -> 243
     assertThat(outputTimesUs).containsExactly(243L);
   }

   @Test
   public void
       getSpeedAdjustedTimeAsync_timeAfterEndTimeAfterProcessorEnded_callbacksCalledWithCorrectParameters()
           throws Exception {
     ArrayList<Long> outputTimesUs = new ArrayList<>();
     // The speed change is at 113Us (5*MICROS_PER_SECOND/sampleRate).
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
             AUDIO_FORMAT, /* frameCounts= */ new int[] {5, 5}, /* speeds= */ new float[] {2, 1});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
-        getConfiguredSpeedChangingAudioProcessor(speedProvider);
-    ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 5, AUDIO_FORMAT.bytesPerFrame);
-    speedChangingAudioProcessor.queueInput(inputBuffer);
-    getAudioProcessorOutput(speedChangingAudioProcessor);
-    inputBuffer.rewind();
-    speedChangingAudioProcessor.queueInput(inputBuffer);
-    speedChangingAudioProcessor.queueEndOfStream();
-    getAudioProcessorOutput(speedChangingAudioProcessor);
-    checkState(speedChangingAudioProcessor.isEnded());
+        new SpeedChangingAudioProcessor(speedProvider);
+    speedChangingAudioProcessor.configure(AUDIO_FORMAT_50_000HZ);
+    speedChangingAudioProcessor.flush();

     speedChangingAudioProcessor.getSpeedAdjustedTimeAsync(
         /* inputTimeUs= */ 300L, outputTimesUs::add);

-    // 150 is after the speed change so floor(113 / 2 + (300 - 113)*1) -> 243
-    assertThat(outputTimesUs).containsExactly(243L);
+    assertThat(outputTimesUs).containsExactly(240L);
   }

   @Test
   public void getMediaDurationUs_returnsCorrectValues() throws Exception {
-    // The speed changes happen every 10ms (441 samples @ 441.KHz)
+    // The speed changes happen every 10ms (500 samples @ 50.KHz)
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
-            /* frameCounts= */ new int[] {441, 441, 441, 441},
+            AUDIO_FORMAT_50_000HZ,
+            /* frameCounts= */ new int[] {500, 500, 500, 500},
             /* speeds= */ new float[] {2, 1, 5, 2});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
-        getConfiguredSpeedChangingAudioProcessor(speedProvider);
-    ByteBuffer inputBuffer =
-        getNonRandomByteBuffer(/* frameCount= */ 441 * 4, AUDIO_FORMAT.bytesPerFrame);
-    while (inputBuffer.position() < inputBuffer.limit()) {
-      speedChangingAudioProcessor.queueInput(inputBuffer);
-    }
-    getAudioProcessorOutput(speedChangingAudioProcessor);
+        new SpeedChangingAudioProcessor(speedProvider);
+    speedChangingAudioProcessor.configure(AUDIO_FORMAT_50_000HZ);
+    speedChangingAudioProcessor.flush();

     // input (in ms) (0, 10, 20, 30, 40) ->
     // output (in ms) (0, 10/2, 10/2 + 10, 10/2 + 10 + 10/5, 10/2 + 10 + 10/5 + 10/2)
@@ -572,30 +532,30 @@ public class SpeedChangingAudioProcessorTest {
     int outputFrameCount = 0;
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
+            AUDIO_FORMAT_44_100HZ,
             /* frameCounts= */ new int[] {1000, 1000, 1000},
             /* speeds= */ new float[] {2, 4, 2}); // 500, 250, 500 = 1250
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
-    ByteBuffer input = getNonRandomByteBuffer(1000, AUDIO_FORMAT.bytesPerFrame);
+    ByteBuffer input = getNonRandomByteBuffer(1000, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     speedChangingAudioProcessor.queueInput(input);
     outputFrameCount +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     input.rewind();

     speedChangingAudioProcessor.queueInput(input);
     outputFrameCount +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     input.rewind();

     speedChangingAudioProcessor.queueInput(input);
     outputFrameCount +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;

     speedChangingAudioProcessor.queueEndOfStream();
     outputFrameCount +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     assertThat(outputFrameCount).isWithin(2).of(1250);
   }

@@ -612,17 +572,17 @@ public class SpeedChangingAudioProcessorTest {
             /* speeds= */ new float[] {2, 3, 8, 4});
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
-    ByteBuffer input = getNonRandomByteBuffer(12, AUDIO_FORMAT.bytesPerFrame);
+    ByteBuffer input = getNonRandomByteBuffer(12, AUDIO_FORMAT_44_100HZ.bytesPerFrame);

     while (input.hasRemaining()) {
       speedChangingAudioProcessor.queueInput(input);
       outputFrameCount +=
-          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     }

     speedChangingAudioProcessor.queueEndOfStream();
     outputFrameCount +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;

     // Allow one sample of tolerance per effectively applied speed change.
     assertThat(outputFrameCount).isWithin(1).of(4);
@@ -633,23 +593,23 @@ public class SpeedChangingAudioProcessorTest {
       throws AudioProcessor.UnhandledAudioFormatException {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
+            AUDIO_FORMAT_44_100HZ,
             /* frameCounts= */ new int[] {1000, 1000},
             /* speeds= */ new float[] {1, 2}); // 1000, 500.
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     // 1500 input frames falls in the middle of the 2x region.
-    ByteBuffer input = getNonRandomByteBuffer(1500, AUDIO_FORMAT.bytesPerFrame);
+    ByteBuffer input = getNonRandomByteBuffer(1500, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
     int outputFrameCount = 0;

     while (input.hasRemaining()) {
       speedChangingAudioProcessor.queueInput(input);
       outputFrameCount +=
-          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     }
     speedChangingAudioProcessor.flush();
     outputFrameCount +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     assertThat(outputFrameCount).isEqualTo(1250);
     input.rewind();

@@ -659,11 +619,11 @@ public class SpeedChangingAudioProcessorTest {
     while (input.hasRemaining()) {
       speedChangingAudioProcessor.queueInput(input);
       outputFrameCount +=
-          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     }
     speedChangingAudioProcessor.queueEndOfStream();
     outputFrameCount +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     assertThat(outputFrameCount).isWithin(1).of(2500); // 1250 * 2.
   }

@@ -672,23 +632,23 @@ public class SpeedChangingAudioProcessorTest {
       throws AudioProcessor.UnhandledAudioFormatException {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
+            AUDIO_FORMAT_44_100HZ,
             /* frameCounts= */ new int[] {1000, 1000},
             /* speeds= */ new float[] {2, 4}); // 500, 250.
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         getConfiguredSpeedChangingAudioProcessor(speedProvider);
     // 1500 input frames falls in the middle of the 2x region.
-    ByteBuffer input = getNonRandomByteBuffer(1500, AUDIO_FORMAT.bytesPerFrame);
+    ByteBuffer input = getNonRandomByteBuffer(1500, AUDIO_FORMAT_44_100HZ.bytesPerFrame);
     int outputFrameCount = 0;

     while (input.hasRemaining()) {
       speedChangingAudioProcessor.queueInput(input);
       outputFrameCount +=
-          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     }
     speedChangingAudioProcessor.flush();
     outputFrameCount +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     assertThat(outputFrameCount).isWithin(1).of(625);
     input.rewind();

@@ -698,11 +658,11 @@ public class SpeedChangingAudioProcessorTest {
     while (input.hasRemaining()) {
      speedChangingAudioProcessor.queueInput(input);
       outputFrameCount +=
-          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+          speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     }
     speedChangingAudioProcessor.queueEndOfStream();
     outputFrameCount +=
-        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT.bytesPerFrame;
+        speedChangingAudioProcessor.getOutput().remaining() / AUDIO_FORMAT_44_100HZ.bytesPerFrame;
     assertThat(outputFrameCount).isWithin(2).of(1250); // 625 * 2.
   }

@@ -716,7 +676,7 @@ public class SpeedChangingAudioProcessorTest {

     long sampleCountAfterProcessorApplied =
         SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
-            speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 100);
+            speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 100);
     assertThat(sampleCountAfterProcessorApplied).isEqualTo(50);
   }

@@ -724,13 +684,13 @@ public class SpeedChangingAudioProcessorTest {
   public void getSampleCountAfterProcessorApplied_withMultipleSpeeds_outputsExpectedSamples() {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
+            AUDIO_FORMAT_44_100HZ,
             /* frameCounts= */ new int[] {100, 400, 50},
             /* speeds= */ new float[] {2.f, 4f, 0.5f});

     long sampleCountAfterProcessorApplied =
         SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
-            speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 550);
+            speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 550);
     assertThat(sampleCountAfterProcessorApplied).isEqualTo(250);
   }

@@ -739,13 +699,13 @@ public class SpeedChangingAudioProcessorTest {
       getSampleCountAfterProcessorApplied_beyondLastSpeedRegion_stillAppliesLastSpeedValue() {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
+            AUDIO_FORMAT_44_100HZ,
             /* frameCounts= */ new int[] {100, 400, 50},
             /* speeds= */ new float[] {2.f, 4f, 0.5f});

     long sampleCountAfterProcessorApplied =
         SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
-            speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 3000);
+            speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 3000);
     assertThat(sampleCountAfterProcessorApplied).isEqualTo(5150);
   }

@@ -754,38 +714,38 @@ public class SpeedChangingAudioProcessorTest {
       getSampleCountAfterProcessorApplied_withInputCountBeyondIntRange_outputsExpectedSamples() {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
+            AUDIO_FORMAT_44_100HZ,
             /* frameCounts= */ new int[] {1000, 10000, 8200},
             /* speeds= */ new float[] {0.2f, 8f, 0.5f});
     long sampleCountAfterProcessorApplied =
         SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
-            speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 3_000_000_000L);
-    assertThat(sampleCountAfterProcessorApplied).isEqualTo(5999984250L);
+            speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 3_000_000_000L);
+    assertThat(sampleCountAfterProcessorApplied).isEqualTo(5_999_984_250L);
   }

   // Testing range validation.
   @SuppressLint("Range")
   @Test
-  public void getSampleCountAfterProcessorApplied_withNegativeSampleCount_throws() {
+  public void getSampleCountAfterProcessorApplied_withNegativeFrameCount_throws() {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
+            AUDIO_FORMAT_44_100HZ,
             /* frameCounts= */ new int[] {1000, 10000, 8200},
             /* speeds= */ new float[] {0.2f, 8f, 0.5f});
     assertThrows(
         IllegalArgumentException.class,
         () ->
             SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
-                speedProvider, AUDIO_FORMAT.sampleRate, /* inputSamples= */ -2L));
+                speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ -2L));
   }

   // Testing range validation.
   @SuppressLint("Range")
   @Test
-  public void getSampleCountAfterProcessorApplied_withZeroSampleRate_throws() {
+  public void getSampleCountAfterProcessorApplied_withZeroFrameRate_throws() {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT,
+            AUDIO_FORMAT_44_100HZ,
             /* frameCounts= */ new int[] {1000, 10000, 8200},
             /* speeds= */ new float[] {0.2f, 8f, 0.5f});
     assertThrows(
@@ -801,14 +761,32 @@ public class SpeedChangingAudioProcessorTest {
         IllegalArgumentException.class,
         () ->
             SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
-                /* speedProvider= */ null, AUDIO_FORMAT.sampleRate, /* inputSamples= */ 1000L));
+                /* speedProvider= */ null,
+                AUDIO_FORMAT_44_100HZ.sampleRate,
+                /* inputSamples= */ 1000L));
   }

+  @Test
+  public void getSampleCountAfterProcessorApplied_withZeroInputFrames_returnsZero() {
+    SpeedProvider speedProvider =
+        TestSpeedProvider.createWithFrameCounts(
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {1000, 10000, 8200},
+            /* speeds= */ new float[] {0.2f, 8f, 0.5f});
+
+    long sampleCountAfterProcessorApplied =
+        SpeedChangingAudioProcessor.getSampleCountAfterProcessorApplied(
+            speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* inputSamples= */ 0L);
+    assertThat(sampleCountAfterProcessorApplied).isEqualTo(0L);
+  }
+
   @Test
   public void isActive_beforeConfigure_returnsFalse() {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {1000}, /* speeds= */ new float[] {2f});
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {1000},
+            /* speeds= */ new float[] {2f});

     SpeedChangingAudioProcessor processor = new SpeedChangingAudioProcessor(speedProvider);
     assertThat(processor.isActive()).isFalse();
@@ -819,18 +797,34 @@ public class SpeedChangingAudioProcessorTest {
       throws AudioProcessor.UnhandledAudioFormatException {
     SpeedProvider speedProvider =
         TestSpeedProvider.createWithFrameCounts(
-            AUDIO_FORMAT, /* frameCounts= */ new int[] {1000}, /* speeds= */ new float[] {2f});
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {1000},
+            /* speeds= */ new float[] {2f});

     SpeedChangingAudioProcessor processor = new SpeedChangingAudioProcessor(speedProvider);
-    processor.configure(AUDIO_FORMAT);
+    processor.configure(AUDIO_FORMAT_44_100HZ);
     assertThat(processor.isActive()).isTrue();
   }

+  @Test
+  public void getInputFrameCountForOutput_withZeroOutputFrames_returnsZero() {
+    SpeedProvider speedProvider =
+        TestSpeedProvider.createWithFrameCounts(
+            AUDIO_FORMAT_44_100HZ,
+            /* frameCounts= */ new int[] {1000, 10000, 8200},
+            /* speeds= */ new float[] {0.2f, 8f, 0.5f});
+
+    long inputFrames =
+        getInputFrameCountForOutput(
+            speedProvider, AUDIO_FORMAT_44_100HZ.sampleRate, /* outputFrameCount= */ 0L);
+    assertThat(inputFrames).isEqualTo(0L);
+  }
+
   private static SpeedChangingAudioProcessor getConfiguredSpeedChangingAudioProcessor(
       SpeedProvider speedProvider) throws AudioProcessor.UnhandledAudioFormatException {
     SpeedChangingAudioProcessor speedChangingAudioProcessor =
         new SpeedChangingAudioProcessor(speedProvider);
-    speedChangingAudioProcessor.configure(AUDIO_FORMAT);
+    speedChangingAudioProcessor.configure(AUDIO_FORMAT_44_100HZ);
     speedChangingAudioProcessor.flush();
     return speedChangingAudioProcessor;
   }
File diff suppressed because it is too large
@@ -245,6 +245,9 @@ public abstract class Mp4Box {
   @SuppressWarnings("ConstantCaseForConstants")
   public static final int TYPE_esds = 0x65736473;

+  @SuppressWarnings("ConstantCaseForConstants")
+  public static final int TYPE_btrt = 0x62747274;
+
   @SuppressWarnings("ConstantCaseForConstants")
   public static final int TYPE_moof = 0x6d6f6f66;
@@ -106,6 +106,9 @@ public final class NalUnitUtil {
    */
   @Deprecated public static final int NAL_UNIT_TYPE_PREFIX = H264_NAL_UNIT_TYPE_PREFIX;

+  /** H.264 unspecified NAL unit. */
+  public static final int H264_NAL_UNIT_TYPE_UNSPECIFIED = 24;
+
   /** H.265 coded slice segment of a random access skipped leading picture (RASL_R). */
   public static final int H265_NAL_UNIT_TYPE_RASL_R = 9;

@@ -133,6 +136,9 @@ public final class NalUnitUtil {
   /** H.265 suffixed supplemental enhancement information (SUFFIX_SEI_NUT). */
   public static final int H265_NAL_UNIT_TYPE_SUFFIX_SEI = 40;

+  /** H.265 unspecified NAL unit. */
+  public static final int H265_NAL_UNIT_TYPE_UNSPECIFIED = 48;
+
   /** Holds data parsed from a H.264 sequence parameter set NAL unit. */
   public static final class SpsData {
@@ -43,6 +43,9 @@ public final class ObuParser {
   /** OBU type frame header. */
   public static final int OBU_FRAME_HEADER = 3;

+  /** OBU type metadata. */
+  public static final int OBU_METADATA = 5;
+
   /** OBU type frame. */
   public static final int OBU_FRAME = 6;
@@ -136,6 +139,48 @@ public final class ObuParser {
     /** See {@code OrderHintBits}. */
     public final int orderHintBits;

+    /** See {@code seq_profile}. */
+    public final int seqProfile;
+
+    /** See {@code seq_level_idx}. */
+    public final int seqLevelIdx0;
+
+    /** See {@code seq_tier}. */
+    public final int seqTier0;
+
+    /** See {@code initial_display_delay_present}. */
+    public final boolean initialDisplayDelayPresentFlag;
+
+    /** See {@code initial_display_delay_minus_one}. */
+    public final int initialDisplayDelayMinus1;
+
+    /** See {@code high_bitdepth}. */
+    public final boolean highBitdepth;
+
+    /** See {@code twelve_bit}. */
+    public final boolean twelveBit;
+
+    /** See {@code mono_chrome}. */
+    public final boolean monochrome;
+
+    /** See {@code subsampling_x}. */
+    public final boolean subsamplingX;
+
+    /** See {@code subsampling_Y}. */
+    public final boolean subsamplingY;
+
+    /** See {@code chroma_sample_position}. */
+    public final int chromaSamplePosition;
+
+    /** See {@code color_primaries}. */
+    public final byte colorPrimaries;
+
+    /** See {@code transfer_characteristics}. */
+    public final byte transferCharacteristics;
+
+    /** See {@code matrix_coefficients}. */
+    public final byte matrixCoefficients;
+
     /**
      * Returns a {@link SequenceHeader} parsed from the input OBU, or {@code null} if the AV1
      * bitstream is not yet supported.
@@ -153,38 +198,60 @@ public final class ObuParser {

     /** Parses a {@link #OBU_SEQUENCE_HEADER} and creates an instance. */
     private SequenceHeader(Obu obu) throws NotYetImplementedException {
+      int seqLevelIdx0 = 0;
+      int seqTier0 = 0;
+      int initialDisplayDelayMinus1 = 0;
       checkArgument(obu.type == OBU_SEQUENCE_HEADER);
       byte[] data = new byte[obu.payload.remaining()];
       // Do not modify obu.payload while reading it.
       obu.payload.asReadOnlyBuffer().get(data);
       ParsableBitArray obuData = new ParsableBitArray(data);
-      obuData.skipBits(4); // seq_profile and still_picture
+      seqProfile = obuData.readBits(3);
+      obuData.skipBit(); // still_picture
       reducedStillPictureHeader = obuData.readBit();
-      throwWhenFeatureRequired(reducedStillPictureHeader);
-      boolean timingInfoPresentFlag = obuData.readBit();
-      if (timingInfoPresentFlag) {
-        skipTimingInfo(obuData);
-        decoderModelInfoPresentFlag = obuData.readBit();
-        if (decoderModelInfoPresentFlag) {
-          // skip decoder_model_info()
-          obuData.skipBits(47);
-        }
-      } else {
+      if (reducedStillPictureHeader) {
+        seqLevelIdx0 = obuData.readBits(5);
         decoderModelInfoPresentFlag = false;
-      }
-      boolean initialDisplayDelayPresentFlag = obuData.readBit();
-      int operatingPointsCntMinus1 = obuData.readBits(5);
-      for (int i = 0; i <= operatingPointsCntMinus1; i++) {
-        obuData.skipBits(12); // operating_point_idc[ i ]
-        int seqLevelIdx = obuData.readBits(5);
-        if (seqLevelIdx > 7) {
-          obuData.skipBit(); // seq_tier[ i ]
-        }
-        if (initialDisplayDelayPresentFlag) {
-          boolean initialDisplayDelayPresentForThisOpFlag = obuData.readBit();
-          if (initialDisplayDelayPresentForThisOpFlag) {
-            obuData.skipBits(4); // initial_display_delay_minus_1[ i ]
+        initialDisplayDelayPresentFlag = false;
+      } else {
+        boolean timingInfoPresentFlag = obuData.readBit();
+        if (timingInfoPresentFlag) {
+          skipTimingInfo(obuData);
+          decoderModelInfoPresentFlag = obuData.readBit();
+          if (decoderModelInfoPresentFlag) {
+            // skip decoder_model_info()
+            obuData.skipBits(47);
+          }
+        } else {
+          decoderModelInfoPresentFlag = false;
+        }
+        throwWhenFeatureRequired(decoderModelInfoPresentFlag);
+        initialDisplayDelayPresentFlag = obuData.readBit();
+        int operatingPointsCntMinus1 = obuData.readBits(5);
+        for (int i = 0; i <= operatingPointsCntMinus1; i++) {
+          obuData.skipBits(12); // operating_point_idc[ i ]
+          if (i == 0) {
+            seqLevelIdx0 = obuData.readBits(5);
+            if (seqLevelIdx0 > 7) {
+              seqTier0 = obuData.readBit() ? 1 : 0;
+            }
+          } else {
+            int seqLevelIdx = obuData.readBits(5);
+            if (seqLevelIdx > 7) {
+              obuData.skipBit(); // seq_tier[ i ]
+            }
+          }
+          if (decoderModelInfoPresentFlag) {
+            obuData.skipBit(); // decoder_model_present_for_this_op
+          }
+          if (initialDisplayDelayPresentFlag) {
+            boolean initialDisplayDelayPresentForThisOpFlag = obuData.readBit();
+            if (initialDisplayDelayPresentForThisOpFlag) {
+              if (i == 0) {
+                initialDisplayDelayMinus1 = obuData.readBits(4);
+              } else {
+                obuData.skipBits(4); // initial_display_delay_minus_1[ i ]
+              }
+            }
           }
         }
       }
@@ -192,39 +259,119 @@ public final class ObuParser {
       int frameHeightBitsMinus1 = obuData.readBits(4);
       obuData.skipBits(frameWidthBitsMinus1 + 1); // max_frame_width_minus_1
       obuData.skipBits(frameHeightBitsMinus1 + 1); // max_frame_height_minus_1
-      frameIdNumbersPresentFlag = obuData.readBit();
-      throwWhenFeatureRequired(frameIdNumbersPresentFlag);
+      if (!reducedStillPictureHeader) {
+        frameIdNumbersPresentFlag = obuData.readBit();
+      } else {
+        frameIdNumbersPresentFlag = false;
+      }
+      if (frameIdNumbersPresentFlag) {
+        obuData.skipBits(4); // delta_frame_id_length_minus_2
+        obuData.skipBits(3); // additional_frame_id_length_minus_1
+      }
       // use_128x128_superblock, enable_filter_intra, and enable_intra_edge_filter
       obuData.skipBits(3);
-      // enable_interintra_compound, enable_masked_compound, enable_warped_motion, and
-      // enable_dual_filter
-      obuData.skipBits(4);
-      boolean enableOrderHint = obuData.readBit();
-      if (enableOrderHint) {
-        obuData.skipBits(2); // enable_jnt_comp and enable_ref_frame_mvs
-      }
-      boolean seqChooseScreenContentTools = obuData.readBit();
-      if (seqChooseScreenContentTools) {
-        seqForceScreenContentTools = true;
-      } else {
-        seqForceScreenContentTools = obuData.readBit();
-      }
-      if (seqForceScreenContentTools) {
-        boolean seqChooseIntegerMv = obuData.readBit();
-        if (seqChooseIntegerMv) {
-          seqForceIntegerMv = true;
-        } else {
-          seqForceIntegerMv = obuData.readBit();
-        }
-      } else {
+      if (reducedStillPictureHeader) {
         seqForceIntegerMv = true;
-      }
-      if (enableOrderHint) {
-        int orderHintBitsMinus1 = obuData.readBits(3);
-        orderHintBits = orderHintBitsMinus1 + 1;
-      } else {
+        seqForceScreenContentTools = true;
         orderHintBits = 0;
+      } else {
+        // enable_interintra_compound, enable_masked_compound, enable_warped_motion, and
+        // enable_dual_filter
+        obuData.skipBits(4);
+        boolean enableOrderHint = obuData.readBit();
+        if (enableOrderHint) {
+          obuData.skipBits(2); // enable_jnt_comp and enable_ref_frame_mvs
+        }
+        boolean seqChooseScreenContentTools = obuData.readBit();
+        if (seqChooseScreenContentTools) {
+          seqForceScreenContentTools = true;
+        } else {
+          seqForceScreenContentTools = obuData.readBit();
+        }
+        if (seqForceScreenContentTools) {
+          boolean seqChooseIntegerMv = obuData.readBit();
+          if (seqChooseIntegerMv) {
+            seqForceIntegerMv = true;
+          } else {
+            seqForceIntegerMv = obuData.readBit();
+          }
+        } else {
+          seqForceIntegerMv = true;
+        }
+        if (enableOrderHint) {
+          int orderHintBitsMinus1 = obuData.readBits(3);
+          orderHintBits = orderHintBitsMinus1 + 1;
+        } else {
+          orderHintBits = 0;
+        }
       }
+      this.seqLevelIdx0 = seqLevelIdx0;
+      this.seqTier0 = seqTier0;
+      this.initialDisplayDelayMinus1 = initialDisplayDelayMinus1;
       // enable_superres, enable_cdef, enable_restoration
       obuData.skipBits(3);
+      // Begin Color Config
+      highBitdepth = obuData.readBit();
+      if (seqProfile == 2 && highBitdepth) {
+        twelveBit = obuData.readBit();
+      } else {
+        twelveBit = false;
+      }
+      if (seqProfile != 1) {
+        monochrome = obuData.readBit();
+      } else {
+        monochrome = false;
+      }
+      boolean colorDescriptionPresent = obuData.readBit();
+      if (colorDescriptionPresent) {
+        colorPrimaries = (byte) obuData.readBits(8);
+        transferCharacteristics = (byte) obuData.readBits(8);
+        matrixCoefficients = (byte) obuData.readBits(8);
+      } else {
+        colorPrimaries = 0;
+        transferCharacteristics = 0;
+        matrixCoefficients = 0;
+      }
+      if (monochrome) {
+        obuData.skipBit(); // color_range
+        subsamplingX = false;
+        subsamplingY = false;
+        chromaSamplePosition = 0;
+      } else if (colorPrimaries == 0x1 /* CP_BT_709 */
+          && transferCharacteristics == 13 /* TC_SRGB */
+          && matrixCoefficients == 0x0 /* MC_IDENTITY */) {
+        // Nothing to read from obu.
+        subsamplingX = false;
+        subsamplingY = false;
+        chromaSamplePosition = 0;
+      } else {
+        obuData.skipBit(); // color_range
+        if (seqProfile == 0) {
+          subsamplingX = true;
+          subsamplingY = true;
+        } else if (seqProfile == 1) {
+          subsamplingX = false;
+          subsamplingY = false;
+        } else {
+          if (twelveBit) {
+            subsamplingX = obuData.readBit();
+            if (subsamplingX) {
+              subsamplingY = obuData.readBit();
+            } else {
+              subsamplingY = false;
+            }
+          } else {
+            subsamplingX = true;
+            subsamplingY = false;
+          }
+        }
+        if (subsamplingX && subsamplingY) {
+          chromaSamplePosition = obuData.readBits(2);
+        } else {
+          chromaSamplePosition = 0;
+        }
+      }
       obuData.skipBit(); // separate_uv_delta_q
|
||||
}
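The color_config branches above fix the AV1 chroma layout implicitly: seq_profile 0 forces 4:2:0, seq_profile 1 forces 4:4:4, and seq_profile 2 derives the layout from the bit depth and the explicit subsampling bits. As an illustration only (describeChromaFormat is a hypothetical helper, not an ObuParser API), the parsed flags map to the usual chroma format names like this:

// Sketch: mapping the parsed color_config flags to a chroma format label,
// following the AV1 sequence header semantics shown above.
static String describeChromaFormat(
    boolean monochrome, boolean subsamplingX, boolean subsamplingY) {
  if (monochrome) {
    return "4:0:0"; // Luma only.
  } else if (subsamplingX && subsamplingY) {
    return "4:2:0"; // Chroma halved in both dimensions (seq_profile 0).
  } else if (subsamplingX) {
    return "4:2:2"; // Chroma halved horizontally (seq_profile 2 at 8/10-bit).
  } else {
    return "4:4:4"; // Full-resolution chroma (seq_profile 1).
  }
}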

/** Advances the bit array by skipping the {@code timing_info()} syntax element. */

@@ -87,6 +87,20 @@ public class ObuParserTest {
assertThat(sequenceHeader.seqForceScreenContentTools).isTrue();
assertThat(sequenceHeader.seqForceIntegerMv).isTrue();
assertThat(sequenceHeader.orderHintBits).isEqualTo(7);
assertThat(sequenceHeader.seqProfile).isEqualTo(0);
assertThat(sequenceHeader.seqLevelIdx0).isEqualTo(4);
assertThat(sequenceHeader.seqTier0).isEqualTo(0);
assertThat(sequenceHeader.initialDisplayDelayPresentFlag).isFalse();
assertThat(sequenceHeader.initialDisplayDelayMinus1).isEqualTo(0);
assertThat(sequenceHeader.highBitdepth).isFalse();
assertThat(sequenceHeader.twelveBit).isFalse();
assertThat(sequenceHeader.monochrome).isFalse();
assertThat(sequenceHeader.subsamplingX).isTrue();
assertThat(sequenceHeader.subsamplingY).isTrue();
assertThat(sequenceHeader.chromaSamplePosition).isEqualTo(0);
assertThat(sequenceHeader.colorPrimaries).isEqualTo(1);
assertThat(sequenceHeader.transferCharacteristics).isEqualTo(1);
assertThat(sequenceHeader.matrixCoefficients).isEqualTo(1);
}

@Test

@@ -31,7 +31,11 @@ public class DefaultHttpDataSourceContractTest extends DataSourceContractTest {

@Override
protected DataSource createDataSource() {
return new DefaultHttpDataSource.Factory().createDataSource();
return new DefaultHttpDataSource.Factory()
// Ensure that 'resource not found' tests fail fast (b/403179253).
.setConnectTimeoutMs(400)
.setReadTimeoutMs(400)
.createDataSource();
}
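The same builder pattern works outside tests: an application that wants faster failure on unreachable hosts can tighten the defaults when constructing the factory. A minimal sketch using only the setters shown in this diff (the 5-second values are illustrative, not library defaults):

import androidx.media3.datasource.DefaultHttpDataSource;

DefaultHttpDataSource.Factory httpFactory =
    new DefaultHttpDataSource.Factory()
        .setConnectTimeoutMs(5_000) // Give up on unresponsive hosts quickly.
        .setReadTimeoutMs(5_000); // Also bound stalls mid-response.
// Every createDataSource() call then inherits these timeouts.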

@Override

@@ -61,7 +61,11 @@ public class HttpEngineDataSourceContractTest extends DataSourceContractTest {
protected DataSource createDataSource() {
HttpEngine httpEngine =
new HttpEngine.Builder(ApplicationProvider.getApplicationContext()).build();
return new HttpEngineDataSource.Factory(httpEngine, executorService).createDataSource();
return new HttpEngineDataSource.Factory(httpEngine, executorService)
// Ensure that 'resource not found' tests fail fast (b/403179253).
.setConnectionTimeoutMs(400)
.setReadTimeoutMs(400)
.createDataSource();
}

@Override

@@ -36,16 +36,21 @@ import androidx.media3.common.PlaybackException;
import androidx.media3.common.util.Assertions;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import com.google.common.base.Ascii;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import com.google.common.net.HttpHeaders;
import com.google.common.primitives.Longs;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.Arrays;

@@ -321,6 +326,8 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa
// The size of read buffer passed to cronet UrlRequest.read().
private static final int READ_BUFFER_SIZE_BYTES = 32 * 1024;

private static final String TAG = "HttpEngineDataSource";

private final HttpEngine httpEngine;
private final Executor executor;
private final int requestPriority;

@@ -709,7 +716,7 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa
@UnstableApi
@VisibleForTesting
@Nullable
UrlRequest.Callback getCurrentUrlRequestCallback() {
UrlRequestCallback getCurrentUrlRequestCallback() {
return currentUrlRequestWrapper == null
? null
: currentUrlRequestWrapper.getUrlRequestCallback();

@@ -932,14 +939,6 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa
return false;
}

@Nullable
private static String parseCookies(@Nullable List<String> setCookieHeaders) {
if (setCookieHeaders == null || setCookieHeaders.isEmpty()) {
return null;
}
return TextUtils.join(";", setCookieHeaders);
}

@Nullable
private static String getFirstHeader(Map<String, List<String>> allHeaders, String headerName) {
@Nullable List<String> headers = allHeaders.get(headerName);

@@ -957,6 +956,61 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa
return remaining;
}

// Stores the cookie headers from the response in the default {@link CookieHandler}.
private static void storeCookiesFromHeaders(UrlResponseInfo info) {
storeCookiesFromHeaders(info, CookieHandler.getDefault());
}

// Stores the cookie headers from the response in the provided {@link CookieHandler}.
private static void storeCookiesFromHeaders(
UrlResponseInfo info, @Nullable CookieHandler cookieHandler) {
if (cookieHandler == null) {
return;
}

try {
cookieHandler.put(new URI(info.getUrl()), info.getHeaders().getAsMap());
} catch (Exception e) {
Log.w(TAG, "Failed to store cookies in CookieHandler", e);
}
}

@VisibleForTesting
/* private */ static String getCookieHeader(String url) {
return getCookieHeader(url, ImmutableMap.of(), CookieHandler.getDefault());
}

@VisibleForTesting
/* private */ static String getCookieHeader(String url, @Nullable CookieHandler cookieHandler) {
return getCookieHeader(url, ImmutableMap.of(), cookieHandler);
}

// getCookieHeader maps Set-Cookie2 (RFC 2965) to Cookie just like CookieManager does.
private static String getCookieHeader(
String url, Map<String, List<String>> headers, @Nullable CookieHandler cookieHandler) {
if (cookieHandler == null) {
return "";
}

Map<String, List<String>> cookieHeaders = ImmutableMap.of();
try {
cookieHeaders = cookieHandler.get(new URI(url), headers);
} catch (Exception e) {
Log.w(TAG, "Failed to read cookies from CookieHandler", e);
}

StringBuilder cookies = new StringBuilder();
if (cookieHeaders.containsKey(HttpHeaders.COOKIE)) {
List<String> cookiesList = cookieHeaders.get(HttpHeaders.COOKIE);
if (cookiesList != null) {
for (String cookie : cookiesList) {
cookies.append(cookie).append("; ");
}
}
}
return cookies.toString().stripTrailing();
}
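Taken together, storeCookiesFromHeaders and getCookieHeader round-trip cookies through java.net.CookieHandler: response headers are written in with put(), and the next request reads a single Cookie header back out with get(). The round trip with a plain CookieManager, as a standalone sketch (the URL and cookie values are arbitrary):

import java.net.CookieManager;
import java.net.URI;
import java.util.List;
import java.util.Map;

public class CookieRoundTrip {
  public static void main(String[] args) throws Exception {
    CookieManager cookieManager = new CookieManager();
    URI uri = URI.create("http://example.com/video/");
    // A response arrives: CookieManager parses and stores the Set-Cookie header.
    cookieManager.put(uri, Map.of("Set-Cookie", List.of("foo=bar; path=/video")));
    // The next request: get() returns a "Cookie" entry with the matching cookies.
    Map<String, List<String>> requestHeaders = cookieManager.get(uri, Map.of());
    System.out.println(requestHeaders.get("Cookie")); // [foo=bar]
  }
}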

/**
* A wrapper class that manages a {@link UrlRequest} and the {@link UrlRequestCallback} associated
* with that request.

@@ -984,7 +1038,7 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa
urlRequest.cancel();
}

public UrlRequest.Callback getUrlRequestCallback() {
public UrlRequestCallback getUrlRequestCallback() {
return urlRequestCallback;
}

@@ -1004,8 +1058,7 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa
}
}

private final class UrlRequestCallback implements UrlRequest.Callback {

final class UrlRequestCallback implements UrlRequest.Callback {
private volatile boolean isClosed = false;

public void close() {

@@ -1040,6 +1093,18 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa
resetConnectTimeout();
}

CookieHandler cookieHandler = CookieHandler.getDefault();

if (cookieHandler == null && handleSetCookieRequests) {
// a temporary CookieManager is created for the duration of this request - this guarantees
// redirects preserve the cookies correctly.
cookieHandler = new CookieManager();
}

storeCookiesFromHeaders(info, cookieHandler);
String cookieHeaders =
getCookieHeader(info.getUrl(), info.getHeaders().getAsMap(), cookieHandler);

boolean shouldKeepPost =
keepPostFor302Redirects
&& dataSpec.httpMethod == DataSpec.HTTP_METHOD_POST

@@ -1047,17 +1112,12 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa

// request.followRedirect() transforms a POST request into a GET request, so if we want to
// keep it as a POST we need to fall through to the manual redirect logic below.
if (!shouldKeepPost && !handleSetCookieRequests) {
request.followRedirect();
return;
}

@Nullable
String cookieHeadersValue =
parseCookies(info.getHeaders().getAsMap().get(HttpHeaders.SET_COOKIE));
if (!shouldKeepPost && TextUtils.isEmpty(cookieHeadersValue)) {
request.followRedirect();
return;
if (!shouldKeepPost) {
// No cookies, or we're not handling them - so just follow the redirect.
if (!handleSetCookieRequests || TextUtils.isEmpty(cookieHeaders)) {
request.followRedirect();
return;
}
}

request.cancel();

@@ -1075,13 +1135,15 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa
} else {
redirectUrlDataSpec = dataSpec.withUri(Uri.parse(newLocationUrl));
}
if (!TextUtils.isEmpty(cookieHeadersValue)) {

if (!TextUtils.isEmpty(cookieHeaders)) {
Map<String, String> requestHeaders = new HashMap<>();
requestHeaders.putAll(dataSpec.httpRequestHeaders);
requestHeaders.put(HttpHeaders.COOKIE, cookieHeadersValue);
requestHeaders.put(HttpHeaders.COOKIE, cookieHeaders);
redirectUrlDataSpec =
redirectUrlDataSpec.buildUpon().setHttpRequestHeaders(requestHeaders).build();
}

UrlRequestWrapper redirectUrlRequestWrapper;
try {
redirectUrlRequestWrapper = buildRequestWrapper(redirectUrlDataSpec);

@@ -1101,6 +1163,7 @@ public final class HttpEngineDataSource extends BaseDataSource implements HttpDa
if (isClosed) {
return;
}
storeCookiesFromHeaders(info);
responseInfo = info;
operation.open();
}

@@ -159,7 +159,7 @@ public final class CacheWriter {
try {
resolvedLength = dataSource.open(boundedDataSpec);
isDataSourceOpen = true;
} catch (IOException e) {
} catch (Exception e) {
DataSourceUtil.closeQuietly(dataSource);
}
}

@@ -172,7 +172,7 @@
dataSpec.buildUpon().setPosition(position).setLength(C.LENGTH_UNSET).build();
try {
resolvedLength = dataSource.open(unboundedDataSpec);
} catch (IOException e) {
} catch (Exception e) {
DataSourceUtil.closeQuietly(dataSource);
throw e;
}

@@ -195,7 +195,7 @@
if (isLastBlock) {
onRequestEndPosition(position + totalBytesRead);
}
} catch (IOException e) {
} catch (Exception e) {
DataSourceUtil.closeQuietly(dataSource);
throw e;
}
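Widening these catch blocks from IOException to Exception means the upstream DataSource gets closed even when open() or read() fails with an unchecked exception, for example a RuntimeException thrown by a TransferListener, or a checked IOException smuggled past the compiler with Util.sneakyThrow (both cases are exercised by the new CacheWriterTest tests later in this diff). The close-on-any-failure shape, condensed into a standalone sketch:

// Sketch of the pattern: clean up on any failure, then rethrow unchanged.
try {
  resolvedLength = dataSource.open(dataSpec);
} catch (Exception e) {
  // Catching Exception rather than IOException also covers RuntimeExceptions
  // and sneakily thrown checked exceptions from listeners.
  DataSourceUtil.closeQuietly(dataSource);
  throw e;
}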

@@ -45,9 +45,14 @@ import androidx.media3.common.util.Util;
import androidx.media3.datasource.HttpDataSource.HttpDataSourceException;
import androidx.media3.datasource.HttpDataSource.InvalidResponseCodeException;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;

@@ -80,8 +85,14 @@ public final class HttpEngineDataSourceTest {

private static final int TEST_CONNECT_TIMEOUT_MS = 100;
private static final int TEST_READ_TIMEOUT_MS = 100;
private static final String TEST_URL = "http://google.com";
private static final String TEST_URL = "http://google.com/video/";
private static final String TEST_CONTENT_TYPE = "test/test";
private static final String TEST_REQUEST_COOKIE = "foo=bar";
private static final String TEST_REQUEST_COOKIE_2 = "baz=qux";
private static final String TEST_RESPONSE_SET_COOKIE =
TEST_REQUEST_COOKIE + ";path=/video; expires 31-12-2099 23:59:59 GMT";
private static final String TEST_RESPONSE_SET_COOKIE_2 =
TEST_REQUEST_COOKIE_2 + ";path=/; expires 31-12-2099 23:59:59 GMT";
private static final byte[] TEST_POST_BODY = Util.getUtf8Bytes("test post body");
private static final long TEST_CONTENT_LENGTH = 16000L;

@@ -141,6 +152,8 @@ public final class HttpEngineDataSourceTest {
// This value can be anything since the DataSpec is unset.
testResponseHeader.put("Content-Length", Long.toString(TEST_CONTENT_LENGTH));
testUrlResponseInfo = createUrlResponseInfo(/* statusCode= */ 200);

CookieHandler.setDefault(null);
}

@After

@@ -272,15 +285,15 @@ public final class HttpEngineDataSourceTest {
@Test
public void requestHeadersSet() throws HttpDataSourceException {
Map<String, String> headersSet = new HashMap<>();
doAnswer(
when(mockUrlRequestBuilder.addHeader(
ArgumentMatchers.anyString(), ArgumentMatchers.anyString()))
.thenAnswer(
(invocation) -> {
String key = invocation.getArgument(0);
String value = invocation.getArgument(1);
headersSet.put(key, value);
return null;
})
.when(mockUrlRequestBuilder)
.addHeader(ArgumentMatchers.anyString(), ArgumentMatchers.anyString());
});

dataSourceUnderTest.setRequestProperty("defaultHeader2", "dataSourceOverridesDefault");
dataSourceUnderTest.setRequestProperty("dataSourceHeader1", "dataSourceValue1");

@@ -447,8 +460,7 @@ public final class HttpEngineDataSourceTest {
assertThat(e).isInstanceOf(HttpDataSource.InvalidContentTypeException.class);
// Check for connection not automatically closed.
verify(mockUrlRequest, never()).cancel();
assertThat(testedContentTypes).hasSize(1);
assertThat(testedContentTypes.get(0)).isEqualTo(TEST_CONTENT_TYPE);
assertThat(testedContentTypes).containsExactly(TEST_CONTENT_TYPE);
}
}

@@ -1277,7 +1289,7 @@ public final class HttpEngineDataSourceTest {
.createDataSource();
mockSingleRedirectSuccess(/* responseCode= */ 302);
dataSourceUnderTest.setRequestProperty("Content-Type", TEST_CONTENT_TYPE);
testResponseHeader.put("Set-Cookie", "testcookie=testcookie; Path=/video");
testResponseHeader.put("Set-Cookie", TEST_RESPONSE_SET_COOKIE);

dataSourceUnderTest.open(testPostDataSpec);

@@ -1449,6 +1461,64 @@ public final class HttpEngineDataSourceTest {
verify(mockUrlRequestBuilder).setDirectExecutorAllowed(true);
}

@Test
public void getCookieHeader_noCookieHandler() {
assertThat(HttpEngineDataSource.getCookieHeader(TEST_URL)).isEmpty();
assertThat(CookieHandler.getDefault()).isNull();
}

@Test
public void getCookieHeader_emptyCookieHandler() {
CookieHandler.setDefault(new CookieManager());
assertThat(HttpEngineDataSource.getCookieHeader(TEST_URL)).isEmpty();
}

@Test
public void getCookieHeader_cookieHandler() throws Exception {
CookieManager cm = new CookieManager();
cm.put(
new URI(TEST_URL),
ImmutableMap.of(
"Set-Cookie", ImmutableList.of(TEST_RESPONSE_SET_COOKIE, TEST_RESPONSE_SET_COOKIE_2)));
CookieHandler.setDefault(cm);

assertThat(HttpEngineDataSource.getCookieHeader(TEST_URL))
.isEqualTo(TEST_REQUEST_COOKIE + "; " + TEST_REQUEST_COOKIE_2 + ";");
}

@Test
public void getCookieHeader_cookieHandlerCustomHandler() throws Exception {
CookieManager cm = new CookieManager();
cm.put(
new URI(TEST_URL),
ImmutableMap.of(
"Set-Cookie", ImmutableList.of(TEST_RESPONSE_SET_COOKIE, TEST_RESPONSE_SET_COOKIE_2)));

assertThat(HttpEngineDataSource.getCookieHeader(TEST_URL, cm))
.isEqualTo(TEST_REQUEST_COOKIE + "; " + TEST_REQUEST_COOKIE_2 + ";");
}

@Test
public void getCookieHeader_cookieHandlerCookie2() throws Exception {
CookieManager cm = new CookieManager();
cm.put(
new URI(TEST_URL),
ImmutableMap.of(
"Set-Cookie2", ImmutableList.of(TEST_RESPONSE_SET_COOKIE, TEST_RESPONSE_SET_COOKIE_2)));
CookieHandler.setDefault(cm);

// This asserts the surprising behavior of CookieManager - Set-Cookie2 is translated to Cookie,
// not Cookie2.
assertThat(cm.get(new URI(TEST_URL), ImmutableMap.of("", ImmutableList.of()))).isNotEmpty();
assertThat(cm.get(new URI(TEST_URL), ImmutableMap.of("", ImmutableList.of())).get("Cookie"))
.containsExactly(TEST_REQUEST_COOKIE, TEST_REQUEST_COOKIE_2);
assertThat(cm.get(new URI(TEST_URL), ImmutableMap.of("", ImmutableList.of())))
.doesNotContainKey("Cookie2");

assertThat(HttpEngineDataSource.getCookieHeader(TEST_URL))
.isEqualTo(TEST_REQUEST_COOKIE + "; " + TEST_REQUEST_COOKIE_2 + ";");
}

// Helper methods.

private void mockStatusResponse() {
@@ -22,8 +22,10 @@ import static org.junit.Assert.assertThrows;
import android.net.Uri;
import androidx.media3.common.C;
import androidx.media3.common.util.Util;
import androidx.media3.datasource.DataSource;
import androidx.media3.datasource.DataSpec;
import androidx.media3.datasource.FileDataSource;
import androidx.media3.datasource.TransferListener;
import androidx.media3.test.utils.FailOnCloseDataSink;
import androidx.media3.test.utils.FakeDataSet;
import androidx.media3.test.utils.FakeDataSource;

@@ -263,6 +265,119 @@ public final class CacheWriterTest {
assertCachedData(cache, fakeDataSet);
}

@Test
public void cache_ioExceptionDuringOpen_closesDataSource() {
FakeDataSet fakeDataSet = new FakeDataSet().newData("test_data").appendReadData(1).endData();
FakeDataSource dataSource = new FakeDataSource(fakeDataSet);
dataSource.addTransferListener(
new TransferListener() {
@Override
public void onTransferInitializing(
DataSource source, DataSpec dataSpec, boolean isNetwork) {
Util.sneakyThrow(new IOException());
}

@Override
public void onTransferStart(DataSource source, DataSpec dataSpec, boolean isNetwork) {}

@Override
public void onBytesTransferred(
DataSource source, DataSpec dataSpec, boolean isNetwork, int bytesTransferred) {}

@Override
public void onTransferEnd(DataSource source, DataSpec dataSpec, boolean isNetwork) {}
});
CacheWriter cacheWriter =
new CacheWriter(
new CacheDataSource(cache, dataSource),
new DataSpec(Uri.parse("test_data")),
/* temporaryBuffer= */ null,
new CachingCounters());

assertThrows(IOException.class, cacheWriter::cache);

assertThat(dataSource.isOpened()).isFalse();
}

@Test
public void cache_ioExceptionDuringRead_closesDataSource() {
FakeDataSet fakeDataSet =
new FakeDataSet()
.newData("test_data")
.appendReadError(new IOException())
.appendReadData(1)
.endData();
FakeDataSource dataSource = new FakeDataSource(fakeDataSet);
CacheWriter cacheWriter =
new CacheWriter(
new CacheDataSource(cache, dataSource),
new DataSpec(Uri.parse("test_data")),
/* temporaryBuffer= */ null,
new CachingCounters());

assertThrows(IOException.class, cacheWriter::cache);

assertThat(dataSource.isOpened()).isFalse();
}

@Test
public void cache_nonIoExceptionDuringOpen_closesDataSource() {
FakeDataSet fakeDataSet = new FakeDataSet().newData("test_data").appendReadData(1).endData();
FakeDataSource dataSource = new FakeDataSource(fakeDataSet);
dataSource.addTransferListener(
new TransferListener() {
@Override
public void onTransferInitializing(
DataSource source, DataSpec dataSpec, boolean isNetwork) {
throw new IllegalStateException();
}

@Override
public void onTransferStart(DataSource source, DataSpec dataSpec, boolean isNetwork) {}

@Override
public void onBytesTransferred(
DataSource source, DataSpec dataSpec, boolean isNetwork, int bytesTransferred) {}

@Override
public void onTransferEnd(DataSource source, DataSpec dataSpec, boolean isNetwork) {}
});
CacheWriter cacheWriter =
new CacheWriter(
new CacheDataSource(cache, dataSource),
new DataSpec(Uri.parse("test_data")),
/* temporaryBuffer= */ null,
new CachingCounters());

assertThrows(IllegalStateException.class, cacheWriter::cache);

assertThat(dataSource.isOpened()).isFalse();
}

@Test
public void cache_nonIoExceptionDuringRead_closesDataSource() {
FakeDataSet fakeDataSet =
new FakeDataSet()
.newData("test_data")
.appendReadAction(
() -> {
throw new IllegalStateException();
})
.appendReadData(1)
.endData();
FakeDataSource dataSource = new FakeDataSource(fakeDataSet);
CacheWriter cacheWriter =
new CacheWriter(
new CacheDataSource(cache, dataSource),
new DataSpec(Uri.parse("test_data")),
/* temporaryBuffer= */ null,
new CachingCounters());

assertThrows(IllegalStateException.class, cacheWriter::cache);

assertThat(dataSource.isOpened()).isFalse();
}

private static final class CachingCounters implements CacheWriter.ProgressListener {

private long contentLength = C.LENGTH_UNSET;

@@ -54,7 +54,11 @@ public class CronetDataSourceContractTest extends DataSourceContractTest {
}
CronetEngine cronetEngine = provider.createBuilder().setUserAgent("test-agent").build();
dataSources.add(
new CronetDataSource.Factory(cronetEngine, executorService).createDataSource());
new CronetDataSource.Factory(cronetEngine, executorService)
// Ensure that 'resource not found' tests fail fast (b/403179253).
.setConnectionTimeoutMs(400)
.setReadTimeoutMs(400)
.createDataSource());
}
return dataSources.build();
}
@@ -26,7 +26,6 @@ import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.util.TraceUtil;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.decoder.CryptoConfig;
import androidx.media3.decoder.Decoder;
import androidx.media3.decoder.DecoderInputBuffer;

@@ -35,6 +34,7 @@ import androidx.media3.exoplayer.DecoderReuseEvaluation;
import androidx.media3.exoplayer.RendererCapabilities;
import androidx.media3.exoplayer.video.DecoderVideoRenderer;
import androidx.media3.exoplayer.video.VideoRendererEventListener;
import java.util.Objects;

// TODO: Merge actual implementation in https://github.com/google/ExoPlayer/pull/7132.
/**

@@ -124,7 +124,7 @@ public final class ExperimentalFfmpegVideoRenderer extends DecoderVideoRenderer
@Override
protected DecoderReuseEvaluation canReuseDecoder(
String decoderName, Format oldFormat, Format newFormat) {
boolean sameMimeType = Util.areEqual(oldFormat.sampleMimeType, newFormat.sampleMimeType);
boolean sameMimeType = Objects.equals(oldFormat.sampleMimeType, newFormat.sampleMimeType);
// TODO: Ability to reuse the decoder may be MIME type dependent.
return new DecoderReuseEvaluation(
decoderName,

@@ -18,17 +18,27 @@ package androidx.media3.decoder.flac;
import static org.junit.Assert.fail;

import androidx.media3.test.utils.ExtractorAsserts;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.media3.test.utils.ExtractorAsserts.AssertionConfig;
import androidx.media3.test.utils.ExtractorAsserts.SimulationConfig;
import com.google.common.collect.ImmutableList;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;

/** Unit test for {@link FlacExtractor}. */
// TODO(internal: b/26110951): Use org.junit.runners.Parameterized (and corresponding methods on
// ExtractorAsserts) when it's supported by our testing infrastructure.
@RunWith(AndroidJUnit4.class)
@RunWith(Parameterized.class)
public class FlacExtractorTest {

@Parameters(name = "{0}")
public static ImmutableList<SimulationConfig> params() {
return ExtractorAsserts.configs();
}

@Parameter public ExtractorAsserts.SimulationConfig simulationConfig;

@Before
public void setUp() {
if (!FlacLibrary.isAvailable()) {

@@ -38,81 +48,120 @@ public class FlacExtractorTest {

@Test
public void sample() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
/* file= */ "media/flac/bear.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_raw");
"media/flac/bear.flac",
new AssertionConfig.Builder().setDumpFilesPrefix("extractordumps/flac/bear_raw").build(),
simulationConfig);
}

@Test
public void sample32bit() throws Exception {
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
"media/flac/bear_32bit.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_32bit_raw")
.build(),
simulationConfig);
}

@Test
public void sampleWithId3HeaderAndId3Enabled() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
/* file= */ "media/flac/bear_with_id3.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_with_id3_enabled_raw");
"media/flac/bear_with_id3.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_with_id3_enabled_raw")
.build(),
simulationConfig);
}

@Test
public void sampleWithId3HeaderAndId3Disabled() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
() -> new FlacExtractor(FlacExtractor.FLAG_DISABLE_ID3_METADATA),
/* file= */ "media/flac/bear_with_id3.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_with_id3_disabled_raw");
"media/flac/bear_with_id3.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_with_id3_disabled_raw")
.build(),
simulationConfig);
}

@Test
public void sampleUnseekable() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
/* file= */ "media/flac/bear_no_seek_table_no_num_samples.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_no_seek_table_no_num_samples_raw");
"media/flac/bear_no_seek_table_no_num_samples.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_no_seek_table_no_num_samples_raw")
.build(),
simulationConfig);
}

@Test
public void sampleWithVorbisComments() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
/* file= */ "media/flac/bear_with_vorbis_comments.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_with_vorbis_comments_raw");
"media/flac/bear_with_vorbis_comments.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_with_vorbis_comments_raw")
.build(),
simulationConfig);
}

@Test
public void sampleWithPicture() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
/* file= */ "media/flac/bear_with_picture.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_with_picture_raw");
"media/flac/bear_with_picture.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_with_picture_raw")
.build(),
simulationConfig);
}

@Test
public void oneMetadataBlock() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
/* file= */ "media/flac/bear_one_metadata_block.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_one_metadata_block_raw");
"media/flac/bear_one_metadata_block.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_one_metadata_block_raw")
.build(),
simulationConfig);
}

@Test
public void noMinMaxFrameSize() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
/* file= */ "media/flac/bear_no_min_max_frame_size.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_no_min_max_frame_size_raw");
"media/flac/bear_no_min_max_frame_size.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_no_min_max_frame_size_raw")
.build(),
simulationConfig);
}

@Test
public void noNumSamples() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
/* file= */ "media/flac/bear_no_num_samples.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_no_num_samples_raw");
"media/flac/bear_no_num_samples.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_no_num_samples_raw")
.build(),
simulationConfig);
}

@Test
public void uncommonSampleRate() throws Exception {
ExtractorAsserts.assertAllBehaviors(
ExtractorAsserts.assertBehavior(
FlacExtractor::new,
/* file= */ "media/flac/bear_uncommon_sample_rate.flac",
/* dumpFilesPrefix= */ "extractordumps/flac/bear_uncommon_sample_rate_raw");
"media/flac/bear_uncommon_sample_rate.flac",
new AssertionConfig.Builder()
.setDumpFilesPrefix("extractordumps/flac/bear_uncommon_sample_rate_raw")
.build(),
simulationConfig);
}
}
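The migration above replaces one assertAllBehaviors call per test with JUnit's Parameterized runner: params() expands ExtractorAsserts.configs() into a named test instance per SimulationConfig, so each simulation appears individually in test reports. Stripped of the media specifics, the runner pattern is (a generic JUnit 4 skeleton, not media3 code):

import com.google.common.collect.ImmutableList;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
public class SimulationModesTest {
  // One test instance is created per element; {0} becomes the display name.
  @Parameters(name = "{0}")
  public static ImmutableList<String> params() {
    return ImmutableList.of("unseekable", "partial-reads", "full-reads");
  }

  // Injected by the runner before each test method runs.
  @Parameter public String mode;

  @Test
  public void runsOncePerConfig() {
    // The body sees a different `mode` value in each instance.
    System.out.println("simulating: " + mode);
  }
}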

@@ -29,7 +29,6 @@ import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.RenderersFactory;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.DefaultAudioSink;
import androidx.media3.exoplayer.source.MediaSource;
import androidx.media3.exoplayer.source.ProgressiveMediaSource;
import androidx.media3.extractor.mkv.MatroskaExtractor;

@@ -48,6 +47,7 @@ public class FlacPlaybackTest {

private static final String BEAR_FLAC_16BIT = "mka/bear-flac-16bit.mka";
private static final String BEAR_FLAC_24BIT = "mka/bear-flac-24bit.mka";
private static final String BEAR_FLAC_32BIT = "mka/bear-flac-32bit.mka";

@Before
public void setUp() {

@@ -66,10 +66,13 @@ public class FlacPlaybackTest {
playAndAssertAudioSinkInput(BEAR_FLAC_24BIT);
}

@Test
public void test32BitPlayback() throws Exception {
playAndAssertAudioSinkInput(BEAR_FLAC_32BIT);
}

private static void playAndAssertAudioSinkInput(String fileName) throws Exception {
CapturingAudioSink audioSink =
new CapturingAudioSink(
new DefaultAudioSink.Builder(ApplicationProvider.getApplicationContext()).build());
CapturingAudioSink audioSink = CapturingAudioSink.create();

TestPlaybackRunnable testPlaybackRunnable =
new TestPlaybackRunnable(

@@ -34,7 +34,6 @@ import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.RenderersFactory;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.DefaultAudioSink;
import androidx.media3.exoplayer.source.MediaSource;
import androidx.media3.exoplayer.source.ProgressiveMediaSource;
import androidx.media3.extractor.mp4.Mp4Extractor;

@@ -63,9 +62,7 @@ public class IamfPlaybackTest {
}

private static void playAndAssertAudioSinkOutput(String fileName) throws Exception {
CapturingAudioSink audioSink =
new CapturingAudioSink(
new DefaultAudioSink.Builder(ApplicationProvider.getApplicationContext()).build());
CapturingAudioSink audioSink = CapturingAudioSink.create();

TestPlaybackRunnable testPlaybackRunnable =
new TestPlaybackRunnable(

@@ -26,6 +26,7 @@ import androidx.media3.common.AudioAttributes;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.audio.AudioManagerCompat;
import androidx.media3.common.util.TraceUtil;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;

@@ -95,10 +96,7 @@ public class LibiamfAudioRenderer extends DecoderAudioRenderer<IamfDecoder> {
return false;
}

AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
if (audioManager == null) {
return false;
}
AudioManager audioManager = AudioManagerCompat.getAudioManager(context);
AudioFormat audioFormat =
new AudioFormat.Builder()
.setEncoding(IamfDecoder.OUTPUT_PCM_ENCODING)

@@ -16,12 +16,15 @@
package androidx.media3.decoder.midi;

import android.content.Context;
import android.os.Handler;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.decoder.CryptoConfig;
import androidx.media3.exoplayer.audio.AudioRendererEventListener;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.DecoderAudioRenderer;

/** Decodes and renders MIDI audio. */

@@ -30,11 +33,25 @@ public final class MidiRenderer extends DecoderAudioRenderer<MidiDecoder> {

private final Context context;

/** Creates the renderer instance. */
/**
* @deprecated Use {@link #MidiRenderer(Context, Handler, AudioRendererEventListener, AudioSink)}
* instead.
*/
@Deprecated
public MidiRenderer(Context context) {
this.context = context.getApplicationContext();
}

/** Creates the renderer instance. */
public MidiRenderer(
Context context,
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
AudioSink audioSink) {
super(eventHandler, eventListener, audioSink);
this.context = context.getApplicationContext();
}
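The new constructor threads the event handler, listener, and AudioSink through to DecoderAudioRenderer, so callers now choose the sink instead of inheriting an implicit default. A hedged usage sketch, assuming only the constructor above plus the DefaultAudioSink.Builder seen elsewhere in this diff (the null event plumbing is permitted by the @Nullable annotations):

MidiRenderer midiRenderer =
    new MidiRenderer(
        context,
        /* eventHandler= */ null, // No delivery thread needed without a listener.
        /* eventListener= */ null,
        new DefaultAudioSink.Builder(context).build());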

@Override
public String getName() {
return "MidiRenderer";

@@ -29,7 +29,6 @@ import androidx.media3.exoplayer.ExoPlayer;
import androidx.media3.exoplayer.Renderer;
import androidx.media3.exoplayer.RenderersFactory;
import androidx.media3.exoplayer.audio.AudioSink;
import androidx.media3.exoplayer.audio.DefaultAudioSink;
import androidx.media3.exoplayer.source.MediaSource;
import androidx.media3.exoplayer.source.ProgressiveMediaSource;
import androidx.media3.extractor.mkv.MatroskaExtractor;

@@ -74,9 +73,7 @@ public class OpusPlaybackTest {
}

private void playUri(String fileName) throws Exception {
CapturingAudioSink audioSink =
new CapturingAudioSink(
new DefaultAudioSink.Builder(ApplicationProvider.getApplicationContext()).build());
CapturingAudioSink audioSink = CapturingAudioSink.create();

TestPlaybackRunnable testPlaybackRunnable =
new TestPlaybackRunnable(

@@ -225,13 +225,14 @@ public class DefaultVideoFrameProcessorTest {
}

@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
public void onOutputFrameAvailableForRendering(
long presentationTimeUs, boolean isRedrawnFrame) {
outputFrameCount++;
if (outputFrameCount == 30) {
firstStreamLastFrameAvailableTimeMs.set(SystemClock.DEFAULT.elapsedRealtime());
}
defaultVideoFrameProcessor.renderOutputFrame(
VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY);
/* renderTimeNs= */ SystemClock.DEFAULT.nanoTime());
}
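Two API shifts recur through the rest of this compare: the frame-availability callback gains an isRedrawnFrame parameter, and immediate rendering is now expressed as an explicit SystemClock timestamp instead of the RENDER_OUTPUT_FRAME_IMMEDIATELY constant. A minimal override in the new shape (assuming the callback sits on VideoFrameProcessor.Listener, as the surrounding tests suggest; videoFrameProcessor stands in for the processor under control):

@Override
public void onOutputFrameAvailableForRendering(
    long presentationTimeUs, boolean isRedrawnFrame) {
  // isRedrawnFrame distinguishes frames re-rendered for a redraw request
  // from first-time output frames.
  videoFrameProcessor.renderOutputFrame(
      /* renderTimeNs= */ SystemClock.DEFAULT.nanoTime()); // Render immediately.
}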

@Override

@@ -312,7 +313,8 @@ public class DefaultVideoFrameProcessorTest {
}

@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
public void onOutputFrameAvailableForRendering(
long presentationTimeUs, boolean isRedrawnFrame) {
outputFrameAvailableConditionVariable.open();
}

@@ -33,6 +33,7 @@ import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.NullableType;
import androidx.media3.common.util.SystemClock;
import androidx.media3.common.util.Util;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;

@@ -149,7 +150,7 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
public void controlledFrameRendering_withOneFrameRequestImmediateRender_rendersframe()
throws Exception {
long originalPresentationTimeUs = 1234;
long renderTimesNs = VideoFrameProcessor.RENDER_OUTPUT_FRAME_IMMEDIATELY;
long renderTimesNs = SystemClock.DEFAULT.nanoTime();
AtomicLong actualPresentationTimeUs = new AtomicLong();
processFramesToEndOfStream(
/* inputPresentationTimesUs= */ ImmutableList.of(originalPresentationTimeUs),

@@ -293,7 +294,8 @@ public final class DefaultVideoFrameProcessorVideoFrameRenderingTest {
}

@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
public void onOutputFrameAvailableForRendering(
long presentationTimeUs, boolean isRedrawnFrame) {
onFrameAvailableListener.onFrameAvailableForRendering(presentationTimeUs);
}

@@ -139,7 +139,8 @@ import java.util.concurrent.atomic.AtomicReference;
}

@Override
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
public void onOutputFrameAvailableForRendering(
long presentationTimeUs, boolean isRedrawnFrame) {
actualPresentationTimesUs.add(presentationTimeUs);
}

@@ -24,6 +24,7 @@ import static androidx.media3.test.utils.TestUtil.PSNR_THRESHOLD;
import static androidx.media3.test.utils.TestUtil.assertBitmapsAreSimilar;
import static androidx.test.core.app.ApplicationProvider.getApplicationContext;
import static com.google.common.truth.Truth.assertThat;
import static java.lang.Math.round;

import android.content.Context;
import android.graphics.Bitmap;

@@ -93,7 +94,7 @@ public class LanczosResampleTest {
GlTextureInfo inputTextureInfo = setupInputTexture(ORIGINAL_JPG_ASSET_PATH);
float scale = 1f / 6;
Size outputSize =
new Size((int) (inputTextureInfo.width * scale), (int) (inputTextureInfo.height * scale));
new Size(round(inputTextureInfo.width * scale), round(inputTextureInfo.height * scale));
lanczosShaderProgram =
LanczosResample.scaleToFit(outputSize.getWidth(), outputSize.getHeight())
.toGlShaderProgram(context, /* useHdr= */ false);

@@ -109,12 +110,35 @@ public class LanczosResampleTest {
assertBitmapsAreSimilar(expectedBitmap, actualBitmap, PSNR_THRESHOLD);
}
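Replacing the (int) casts with Math.round when computing scaled output sizes removes a downward bias: casting truncates toward zero, so any fractional product loses a pixel, while rounding picks the nearest integer. A quick arithmetic illustration, independent of the test:

float scale = 1f / 6;
int castWidth = (int) (515 * scale); // 85: truncation drops the .83 fraction.
int roundedWidth = Math.round(515 * scale); // 86: nearest integer.
// For exact multiples the two agree: both give 180 for an input of 1080.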

@Test
public void queueInputFrame_with6xDownscaleFlexibleOrientation_matchesGoldenFile()
throws Exception {
GlTextureInfo inputTextureInfo = setupInputTexture(ORIGINAL_JPG_ASSET_PATH);
float scale = 1f / 6;
Size outputSize =
new Size(round(inputTextureInfo.width * scale), round(inputTextureInfo.height * scale));
lanczosShaderProgram =
LanczosResample.scaleToFitWithFlexibleOrientation(
outputSize.getHeight(), outputSize.getWidth())
.toGlShaderProgram(context, /* useHdr= */ false);
setupOutputTexture(outputSize.getWidth(), outputSize.getHeight());
Bitmap expectedBitmap = readBitmap(DOWNSCALED_6X_PNG_ASSET_PATH);

lanczosShaderProgram.queueInputFrame(
new DefaultGlObjectsProvider(eglContext), inputTextureInfo, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromFocusedGlFramebuffer(outputSize.getWidth(), outputSize.getHeight());

maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual", actualBitmap, /* path= */ null);
assertBitmapsAreSimilar(expectedBitmap, actualBitmap, PSNR_THRESHOLD);
}

@Test
public void queueInputFrame_with3xUpscale_matchesGoldenFile() throws Exception {
GlTextureInfo inputTextureInfo = setupInputTexture(SMALLER_JPG_ASSET_PATH);
float scale = 3;
Size outputSize =
new Size((int) (inputTextureInfo.width * scale), (int) (inputTextureInfo.height * scale));
new Size(round(inputTextureInfo.width * scale), round(inputTextureInfo.height * scale));
lanczosShaderProgram =
LanczosResample.scaleToFit(outputSize.getWidth(), outputSize.getHeight())
.toGlShaderProgram(context, /* useHdr= */ false);

@@ -130,6 +154,29 @@ public class LanczosResampleTest {
assertBitmapsAreSimilar(expectedBitmap, actualBitmap, PSNR_THRESHOLD);
}

@Test
public void queueInputFrame_with3xUpscaleFlexibleOrientation_matchesGoldenFile()
throws Exception {
GlTextureInfo inputTextureInfo = setupInputTexture(SMALLER_JPG_ASSET_PATH);
float scale = 3;
Size outputSize =
new Size((int) (inputTextureInfo.width * scale), (int) (inputTextureInfo.height * scale));
lanczosShaderProgram =
LanczosResample.scaleToFitWithFlexibleOrientation(
outputSize.getWidth(), outputSize.getHeight())
.toGlShaderProgram(context, /* useHdr= */ false);
setupOutputTexture(outputSize.getWidth(), outputSize.getHeight());
Bitmap expectedBitmap = readBitmap(UPSCALED_3X_PNG_ASSET_PATH);

lanczosShaderProgram.queueInputFrame(
new DefaultGlObjectsProvider(eglContext), inputTextureInfo, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromFocusedGlFramebuffer(outputSize.getWidth(), outputSize.getHeight());

maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual", actualBitmap, /* path= */ null);
assertBitmapsAreSimilar(expectedBitmap, actualBitmap, PSNR_THRESHOLD);
}

@Test
public void isNoOp_whenSizeDoesntChange_returnsTrue() {
LanczosResample lanczosResample = LanczosResample.scaleToFit(720, 1280);

@@ -137,6 +184,14 @@ public class LanczosResampleTest {
assertThat(lanczosResample.isNoOp(720, 1280)).isTrue();
}

@Test
public void isNoOp_whenSizeDoesntChangeFlexibleOrientation_returnsTrue() {
LanczosResample lanczosResample = LanczosResample.scaleToFitWithFlexibleOrientation(720, 1280);

assertThat(lanczosResample.isNoOp(720, 1280)).isTrue();
assertThat(lanczosResample.isNoOp(1280, 720)).isTrue();
}

@Test
public void isNoOp_forSmallScalingFactors_returnsTrue() {
LanczosResample lanczosResample = LanczosResample.scaleToFit(1920, 1072);

@@ -145,12 +200,28 @@ public class LanczosResampleTest {
}

@Test
public void isNoOp_forLargeScalingFactors_returnsTrue() {
public void isNoOp_forSmallScalingFactorsFlexibleOrientation_returnsTrue() {
LanczosResample lanczosResample = LanczosResample.scaleToFitWithFlexibleOrientation(1920, 1072);

assertThat(lanczosResample.isNoOp(1920, 1080)).isTrue();
assertThat(lanczosResample.isNoOp(1080, 1920)).isTrue();
}

@Test
public void isNoOp_forLargeScalingFactors_returnsFalse() {
LanczosResample lanczosResample = LanczosResample.scaleToFit(1920, 1068);

assertThat(lanczosResample.isNoOp(1920, 1080)).isFalse();
}

@Test
public void isNoOp_forLargeScalingFactorsFlexibleOrientation_returnsFalse() {
LanczosResample lanczosResample = LanczosResample.scaleToFitWithFlexibleOrientation(1920, 1068);

assertThat(lanczosResample.isNoOp(1920, 1080)).isFalse();
assertThat(lanczosResample.isNoOp(1080, 1920)).isFalse();
}
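As these tests pin down, scaleToFitWithFlexibleOrientation treats its target dimensions as orientation-agnostic: an input matching either the requested size or its transpose already fits, and the scaling-factor tolerances apply the same way in both orientations. That lets a single effect instance serve mixed portrait and landscape streams, as in this sketch (same APIs as the tests above, values illustrative):

// One effect covers both orientations of a 720p stream.
LanczosResample resample = LanczosResample.scaleToFitWithFlexibleOrientation(720, 1280);
boolean landscapeIsNoOp = resample.isNoOp(1280, 720); // true
boolean portraitIsNoOp = resample.isNoOp(720, 1280); // true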

private static GlTextureInfo setupInputTexture(String path) throws Exception {
Bitmap inputBitmap = readBitmap(path);
return new GlTextureInfo(

@@ -61,6 +61,8 @@ public final class PresentationPixelTest {

private static final String ORIGINAL_PNG_ASSET_PATH =
"test-generated-goldens/sample_mp4_first_frame/electrical_colors/original.png";
private static final String ORIGINAL_PORTRAIT_PNG_ASSET_PATH =
"test-generated-goldens/sample_mp4_first_frame/electrical_colors/original_portrait.png";
private static final String ASPECT_RATIO_SCALE_TO_FIT_NARROW_PNG_ASSET_PATH =
"test-generated-goldens/sample_mp4_first_frame/electrical_colors/aspect_ratio_scale_to_fit_narrow.png";
private static final String ASPECT_RATIO_SCALE_TO_FIT_WIDE_PNG_ASSET_PATH =

@@ -76,6 +78,8 @@ public final class PresentationPixelTest {
private static final String HIGH_RESOLUTION_JPG_ASSET_PATH = "media/jpeg/ultraHDR.jpg";
private static final String DOWNSCALED_6X_PNG_ASSET_PATH =
"test-generated-goldens/PresentationPixelTest/ultraHDR_mipmap_512x680.png";
private static final String UPSCALED_2X_PORTRAIT_PNG_ASSET_PATH =
"test-generated-goldens/sample_mp4_first_frame/electrical_colors/upscale_2x_portrait.png";

private final Context context = getApplicationContext();

@@ -282,6 +286,95 @@ public final class PresentationPixelTest {
assertBitmapsAreSimilar(expectedBitmap, actualBitmap, PSNR_THRESHOLD);
}

@Test
public void drawFrame_createForShortSide_landscape_noEdits_matchesGoldenFile() throws Exception {
presentationShaderProgram =
Presentation.createForShortSide(inputHeight)
.toGlShaderProgram(context, /* useHdr= */ false);
Size outputSize = presentationShaderProgram.configure(inputWidth, inputHeight);
setupOutputTexture(outputSize.getWidth(), outputSize.getHeight());
Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH);

presentationShaderProgram.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromFocusedGlFramebuffer(outputSize.getWidth(), outputSize.getHeight());

maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual", actualBitmap, /* path= */ null);
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}

@Test
public void drawFrame_createForShortSide_portrait_noEdits_matchesGoldenFile() throws Exception {
Bitmap inputBitmap = readBitmap(ORIGINAL_PORTRAIT_PNG_ASSET_PATH);
inputWidth = inputBitmap.getWidth();
inputHeight = inputBitmap.getHeight();
inputTexId = createGlTextureFromBitmap(inputBitmap);
presentationShaderProgram =
Presentation.createForShortSide(inputWidth).toGlShaderProgram(context, /* useHdr= */ false);
Size outputSize = presentationShaderProgram.configure(inputWidth, inputHeight);
setupOutputTexture(outputSize.getWidth(), outputSize.getHeight());

presentationShaderProgram.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromFocusedGlFramebuffer(outputSize.getWidth(), outputSize.getHeight());

maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual", actualBitmap, /* path= */ null);
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(inputBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}

@Test
public void drawFrame_createForShortSide_portrait_upscale_matchesGoldenFile() throws Exception {
Bitmap inputBitmap = readBitmap(ORIGINAL_PORTRAIT_PNG_ASSET_PATH);
inputWidth = inputBitmap.getWidth();
inputHeight = inputBitmap.getHeight();
inputTexId = createGlTextureFromBitmap(inputBitmap);
presentationShaderProgram =
Presentation.createForShortSide(inputWidth * 2)
.toGlShaderProgram(context, /* useHdr= */ false);
Size outputSize = presentationShaderProgram.configure(inputWidth, inputHeight);
setupOutputTexture(outputSize.getWidth(), outputSize.getHeight());
Bitmap expectedBitmap = readBitmap(UPSCALED_2X_PORTRAIT_PNG_ASSET_PATH);

presentationShaderProgram.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromFocusedGlFramebuffer(outputSize.getWidth(), outputSize.getHeight());

maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual", actualBitmap, /* path= */ null);
// TODO(b/207848601): Switch to using proper tooling for testing against golden data.
float averagePixelAbsoluteDifference =
getBitmapAveragePixelAbsoluteDifferenceArgb8888(expectedBitmap, actualBitmap, testId);
assertThat(averagePixelAbsoluteDifference).isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE);
}

@Test
public void drawFrame_createForShortSide_portrait_downscaleWithLinearMipmap_matchesGoldenFile()
throws Exception {
Bitmap inputBitmap = readBitmap(HIGH_RESOLUTION_JPG_ASSET_PATH);
inputWidth = inputBitmap.getWidth();
inputHeight = inputBitmap.getHeight();
inputTexId = createGlTextureFromBitmap(inputBitmap);
presentationShaderProgram =
Presentation.createForShortSide(inputWidth / 6)
.copyWithTextureMinFilter(C.TEXTURE_MIN_FILTER_LINEAR_MIPMAP_LINEAR)
.toGlShaderProgram(context, /* useHdr= */ false);
Size outputSize = presentationShaderProgram.configure(inputWidth, inputHeight);
setupOutputTexture(outputSize.getWidth(), outputSize.getHeight());
Bitmap expectedBitmap = readBitmap(DOWNSCALED_6X_PNG_ASSET_PATH);

presentationShaderProgram.drawFrame(inputTexId, /* presentationTimeUs= */ 0);
Bitmap actualBitmap =
createArgb8888BitmapFromFocusedGlFramebuffer(outputSize.getWidth(), outputSize.getHeight());

maybeSaveTestBitmap(testId, /* bitmapLabel= */ "actual", actualBitmap, /* path= */ null);
assertBitmapsAreSimilar(expectedBitmap, actualBitmap, PSNR_THRESHOLD);
}
|
||||
|
||||
private void setupOutputTexture(int outputWidth, int outputHeight) throws GlUtil.GlException {
|
||||
int outputTexId =
|
||||
GlUtil.createTexture(
|
||||
|
@ -68,12 +68,16 @@ public final class DebugViewShaderProgram implements GlShaderProgram {
|
||||
private Executor errorListenerExecutor;
|
||||
|
||||
private @MonotonicNonNull EGLDisplay eglDisplay;
|
||||
private int outputWidth;
|
||||
private int outputHeight;
|
||||
|
||||
public DebugViewShaderProgram(
|
||||
Context context, DebugViewProvider debugViewProvider, ColorInfo outputColorInfo) {
|
||||
this.context = context;
|
||||
this.debugViewProvider = debugViewProvider;
|
||||
this.outputColorInfo = outputColorInfo;
|
||||
this.outputWidth = C.LENGTH_UNSET;
|
||||
this.outputHeight = C.LENGTH_UNSET;
|
||||
inputListener = new InputListener() {};
|
||||
outputListener = new OutputListener() {};
|
||||
errorListener =
|
||||
@ -154,9 +158,13 @@ public final class DebugViewShaderProgram implements GlShaderProgram {
|
||||
eglDisplay = getDefaultEglDisplay();
|
||||
}
|
||||
EGLContext eglContext = GlUtil.getCurrentContext();
|
||||
if (outputWidth == C.LENGTH_UNSET || outputHeight == C.LENGTH_UNSET) {
|
||||
outputWidth = inputWidth;
|
||||
outputHeight = inputHeight;
|
||||
}
|
||||
@Nullable
|
||||
SurfaceView debugSurfaceView =
|
||||
debugViewProvider.getDebugPreviewSurfaceView(inputWidth, inputHeight);
|
||||
debugViewProvider.getDebugPreviewSurfaceView(outputWidth, outputHeight);
|
||||
if (debugSurfaceView != null && !Objects.equals(this.debugSurfaceView, debugSurfaceView)) {
|
||||
debugSurfaceViewWrapper =
|
||||
new SurfaceViewWrapper(
|
||||
@ -164,10 +172,16 @@ public final class DebugViewShaderProgram implements GlShaderProgram {
|
||||
}
|
||||
this.debugSurfaceView = debugSurfaceView;
|
||||
if (defaultShaderProgram == null) {
|
||||
ImmutableList.Builder<GlMatrixTransformation> matrixTransformationListBuilder =
|
||||
new ImmutableList.Builder<>();
|
||||
matrixTransformationListBuilder.add(
|
||||
Presentation.createForWidthAndHeight(
|
||||
outputWidth, outputHeight, Presentation.LAYOUT_SCALE_TO_FIT));
|
||||
|
||||
defaultShaderProgram =
|
||||
DefaultShaderProgram.createApplyingOetf(
|
||||
context,
|
||||
/* matrixTransformations= */ ImmutableList.of(),
|
||||
/* matrixTransformations= */ matrixTransformationListBuilder.build(),
|
||||
/* rgbMatrices= */ ImmutableList.of(),
|
||||
outputColorInfo,
|
||||
outputColorInfo.colorTransfer == C.COLOR_TRANSFER_LINEAR
|
||||
|
@ -201,8 +201,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
|
||||
InputFrameInfo inputFrameInfo =
|
||||
new InputFrameInfo(
|
||||
textureProducer,
|
||||
inputTexture,
|
||||
presentationTimeUs,
|
||||
new TimedGlTextureInfo(inputTexture, presentationTimeUs),
|
||||
settings.getOverlaySettings(inputIndex, presentationTimeUs));
|
||||
inputSource.frameInfos.add(inputFrameInfo);
|
||||
|
||||
@ -260,13 +259,15 @@ public final class DefaultVideoCompositor implements VideoCompositor {
|
||||
// nextTimestampToComposite.
|
||||
@Nullable InputFrameInfo nextPrimaryFrame = primaryInputSource.frameInfos.peek();
|
||||
long nextTimestampToComposite =
|
||||
nextPrimaryFrame != null ? nextPrimaryFrame.presentationTimeUs : C.TIME_UNSET;
|
||||
nextPrimaryFrame != null
|
||||
? nextPrimaryFrame.timedGlTextureInfo.presentationTimeUs
|
||||
: C.TIME_UNSET;
|
||||
|
||||
int numberOfSecondaryFramesBeforeOrAtNextTargetTimestamp =
|
||||
Iterables.size(
|
||||
Iterables.filter(
|
||||
secondaryInputSource.frameInfos,
|
||||
frame -> frame.presentationTimeUs <= nextTimestampToComposite));
|
||||
frame -> frame.timedGlTextureInfo.presentationTimeUs <= nextTimestampToComposite));
|
||||
releaseFrames(
|
||||
secondaryInputSource,
|
||||
/* numberOfFramesToRelease= */ max(
|
||||
@ -277,7 +278,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
|
||||
for (int i = 0; i < numberOfFramesToRelease; i++) {
|
||||
InputFrameInfo frameInfoToRelease = inputSource.frameInfos.remove();
|
||||
frameInfoToRelease.textureProducer.releaseOutputTexture(
|
||||
frameInfoToRelease.presentationTimeUs);
|
||||
frameInfoToRelease.timedGlTextureInfo.presentationTimeUs);
|
||||
}
|
||||
}
|
||||
|
||||
@ -302,7 +303,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
|
||||
|
||||
ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>();
|
||||
for (int i = 0; i < framesToComposite.size(); i++) {
|
||||
GlTextureInfo texture = framesToComposite.get(i).texture;
|
||||
GlTextureInfo texture = framesToComposite.get(i).timedGlTextureInfo.glTextureInfo;
|
||||
inputSizes.add(new Size(texture.width, texture.height));
|
||||
}
|
||||
Size outputSize = settings.getOutputSize(inputSizes.build());
|
||||
@ -310,7 +311,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
|
||||
glObjectsProvider, outputSize.getWidth(), outputSize.getHeight());
|
||||
|
||||
GlTextureInfo outputTexture = outputTexturePool.useTexture();
|
||||
long outputPresentationTimestampUs = primaryInputFrame.presentationTimeUs;
|
||||
long outputPresentationTimestampUs = primaryInputFrame.timedGlTextureInfo.presentationTimeUs;
|
||||
outputTextureTimestamps.add(outputPresentationTimestampUs);
|
||||
|
||||
compositorGlProgram.drawFrame(framesToComposite, outputTexture);
|
||||
@ -369,16 +370,18 @@ public final class DefaultVideoCompositor implements VideoCompositor {
|
||||
Iterator<InputFrameInfo> frameInfosIterator = secondaryInputSource.frameInfos.iterator();
|
||||
while (frameInfosIterator.hasNext()) {
|
||||
InputFrameInfo candidateFrame = frameInfosIterator.next();
|
||||
long candidateTimestampUs = candidateFrame.presentationTimeUs;
|
||||
long candidateTimestampUs = candidateFrame.timedGlTextureInfo.presentationTimeUs;
|
||||
long candidateAbsDistance =
|
||||
abs(candidateTimestampUs - primaryFrameToComposite.presentationTimeUs);
|
||||
abs(
|
||||
candidateTimestampUs
|
||||
- primaryFrameToComposite.timedGlTextureInfo.presentationTimeUs);
|
||||
|
||||
if (candidateAbsDistance < minTimeDiffFromPrimaryUs) {
|
||||
minTimeDiffFromPrimaryUs = candidateAbsDistance;
|
||||
secondaryFrameToComposite = candidateFrame;
|
||||
}
|
||||
|
||||
if (candidateTimestampUs > primaryFrameToComposite.presentationTimeUs
|
||||
if (candidateTimestampUs > primaryFrameToComposite.timedGlTextureInfo.presentationTimeUs
|
||||
|| (!frameInfosIterator.hasNext() && secondaryInputSource.isInputEnded)) {
|
||||
framesToComposite.add(checkNotNull(secondaryFrameToComposite));
|
||||
break;
|
||||
@ -503,7 +506,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
|
||||
|
||||
private void blendOntoFocusedTexture(InputFrameInfo inputFrameInfo) throws GlUtil.GlException {
|
||||
GlProgram glProgram = checkNotNull(this.glProgram);
|
||||
GlTextureInfo inputTexture = inputFrameInfo.texture;
|
||||
GlTextureInfo inputTexture = inputFrameInfo.timedGlTextureInfo.glTextureInfo;
|
||||
glProgram.setSamplerTexIdUniform("uTexSampler", inputTexture.texId, /* texUnitIndex= */ 0);
|
||||
float[] transformationMatrix =
|
||||
overlayMatrixProvider.getTransformationMatrix(
|
||||
@ -537,18 +540,15 @@ public final class DefaultVideoCompositor implements VideoCompositor {
|
||||
/** Holds information on a frame and how to release it. */
|
||||
private static final class InputFrameInfo {
|
||||
public final GlTextureProducer textureProducer;
|
||||
public final GlTextureInfo texture;
|
||||
public final long presentationTimeUs;
|
||||
public final TimedGlTextureInfo timedGlTextureInfo;
|
||||
public final OverlaySettings overlaySettings;
|
||||
|
||||
public InputFrameInfo(
|
||||
GlTextureProducer textureProducer,
|
||||
GlTextureInfo texture,
|
||||
long presentationTimeUs,
|
||||
TimedGlTextureInfo timedGlTextureInfo,
|
||||
OverlaySettings overlaySettings) {
|
||||
this.textureProducer = textureProducer;
|
||||
this.texture = texture;
|
||||
this.presentationTimeUs = presentationTimeUs;
|
||||
this.timedGlTextureInfo = timedGlTextureInfo;
|
||||
this.overlaySettings = overlaySettings;
|
||||
}
|
||||
}
|
||||
|
@ -20,6 +20,7 @@ import static androidx.media3.common.util.Assertions.checkNotNull;
|
||||
import static androidx.media3.common.util.Assertions.checkState;
|
||||
import static androidx.media3.common.util.Assertions.checkStateNotNull;
|
||||
import static androidx.media3.common.util.GlUtil.getDefaultEglDisplay;
|
||||
import static androidx.media3.common.util.Util.castNonNull;
|
||||
import static androidx.media3.effect.DebugTraceUtil.COMPONENT_VFP;
|
||||
import static androidx.media3.effect.DebugTraceUtil.EVENT_RECEIVE_END_OF_ALL_INPUT;
|
||||
import static androidx.media3.effect.DebugTraceUtil.EVENT_REGISTER_NEW_INPUT_STREAM;
|
||||
@ -157,6 +158,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
private @MonotonicNonNull GlObjectsProvider glObjectsProvider;
|
||||
private GlTextureProducer.@MonotonicNonNull Listener textureOutputListener;
|
||||
private int textureOutputCapacity;
|
||||
private boolean enableReplayableCache;
|
||||
private boolean requireRegisteringAllInputFrames;
|
||||
private boolean experimentalAdjustSurfaceTextureTransformationMatrix;
|
||||
private boolean experimentalRepeatInputBitmapWithoutResampling;
|
||||
@ -175,6 +177,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
glObjectsProvider = factory.glObjectsProvider;
|
||||
textureOutputListener = factory.textureOutputListener;
|
||||
textureOutputCapacity = factory.textureOutputCapacity;
|
||||
enableReplayableCache = factory.enableReplayableCache;
|
||||
requireRegisteringAllInputFrames = !factory.repeatLastRegisteredFrame;
|
||||
experimentalAdjustSurfaceTextureTransformationMatrix =
|
||||
factory.experimentalAdjustSurfaceTextureTransformationMatrix;
|
||||
@ -265,6 +268,22 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets whether to use a frame cache to {@link DefaultVideoFrameProcessor#redraw} frames.
|
||||
*
|
||||
* <p>The default value is {@code false}, in this case calling {@link
|
||||
* VideoFrameProcessor#redraw} throws {@link UnsupportedOperationException}.
|
||||
*
|
||||
* <p>Using a frame cache enables precise redrawing, but increases resource and power usages.
|
||||
*
|
||||
* @param enableReplayableCache Whether to use a frame cache.
|
||||
*/
|
||||
@CanIgnoreReturnValue
|
||||
public Builder setEnableReplayableCache(boolean enableReplayableCache) {
|
||||
this.enableReplayableCache = enableReplayableCache;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets texture output settings.
|
||||
*
|
||||
@ -340,6 +359,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
executorService,
|
||||
textureOutputListener,
|
||||
textureOutputCapacity,
|
||||
enableReplayableCache,
|
||||
experimentalAdjustSurfaceTextureTransformationMatrix,
|
||||
experimentalRepeatInputBitmapWithoutResampling);
|
||||
}
|
||||
@ -351,6 +371,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
@Nullable private final ExecutorService executorService;
|
||||
@Nullable private final GlTextureProducer.Listener textureOutputListener;
|
||||
private final int textureOutputCapacity;
|
||||
private final boolean enableReplayableCache;
|
||||
private final boolean experimentalAdjustSurfaceTextureTransformationMatrix;
|
||||
private final boolean experimentalRepeatInputBitmapWithoutResampling;
|
||||
|
||||
@ -361,6 +382,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
@Nullable ExecutorService executorService,
|
||||
@Nullable GlTextureProducer.Listener textureOutputListener,
|
||||
int textureOutputCapacity,
|
||||
boolean enableReplayableCache,
|
||||
boolean experimentalAdjustSurfaceTextureTransformationMatrix,
|
||||
boolean experimentalRepeatInputBitmapWithoutResampling) {
|
||||
this.sdrWorkingColorSpace = sdrWorkingColorSpace;
|
||||
@ -369,6 +391,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
this.executorService = executorService;
|
||||
this.textureOutputListener = textureOutputListener;
|
||||
this.textureOutputCapacity = textureOutputCapacity;
|
||||
this.enableReplayableCache = enableReplayableCache;
|
||||
this.experimentalAdjustSurfaceTextureTransformationMatrix =
|
||||
experimentalAdjustSurfaceTextureTransformationMatrix;
|
||||
this.experimentalRepeatInputBitmapWithoutResampling =
|
||||
@ -437,6 +460,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
listener,
|
||||
instanceGlObjectsProvider,
|
||||
shouldReleaseGlObjectsProvider,
|
||||
enableReplayableCache,
|
||||
textureOutputListener,
|
||||
textureOutputCapacity,
|
||||
repeatLastRegisteredFrame,
|
||||
@ -492,6 +516,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
private final Object lock;
|
||||
private final ColorInfo outputColorInfo;
|
||||
private final DebugViewProvider debugViewProvider;
|
||||
@Nullable private final ReplayableFrameCacheGlShaderProgram frameCache;
|
||||
|
||||
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
|
||||
private volatile boolean inputStreamEnded;
|
||||
@ -508,7 +533,8 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
FinalShaderProgramWrapper finalShaderProgramWrapper,
|
||||
boolean renderFramesAutomatically,
|
||||
ColorInfo outputColorInfo,
|
||||
DebugViewProvider debugViewProvider) {
|
||||
DebugViewProvider debugViewProvider,
|
||||
@Nullable ReplayableFrameCacheGlShaderProgram frameCache) {
|
||||
this.context = context;
|
||||
this.glObjectsProvider = glObjectsProvider;
|
||||
this.shouldReleaseGlObjectsProvider = shouldReleaseGlObjectsProvider;
|
||||
@ -521,18 +547,30 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
this.activeEffects = new ArrayList<>();
|
||||
this.lock = new Object();
|
||||
this.outputColorInfo = outputColorInfo;
|
||||
this.frameCache = frameCache;
|
||||
this.debugViewProvider = debugViewProvider;
|
||||
this.finalShaderProgramWrapper = finalShaderProgramWrapper;
|
||||
this.intermediateGlShaderPrograms = new ArrayList<>();
|
||||
this.inputStreamRegisteredCondition = new ConditionVariable();
|
||||
inputStreamRegisteredCondition.open();
|
||||
this.finalShaderProgramWrapper.setOnInputStreamProcessedListener(
|
||||
() -> {
|
||||
if (inputStreamEnded) {
|
||||
listenerExecutor.execute(listener::onEnded);
|
||||
DebugTraceUtil.logEvent(COMPONENT_VFP, EVENT_SIGNAL_ENDED, C.TIME_END_OF_SOURCE);
|
||||
} else {
|
||||
submitPendingInputStream();
|
||||
this.finalShaderProgramWrapper.setListener(
|
||||
new FinalShaderProgramWrapper.Listener() {
|
||||
@Override
|
||||
public void onInputStreamProcessed() {
|
||||
if (inputStreamEnded) {
|
||||
listenerExecutor.execute(listener::onEnded);
|
||||
DebugTraceUtil.logEvent(COMPONENT_VFP, EVENT_SIGNAL_ENDED, C.TIME_END_OF_SOURCE);
|
||||
} else {
|
||||
DefaultVideoFrameProcessor.this.submitPendingInputStream();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFrameRendered(long presentationTimeUs) {
|
||||
if (frameCache == null) {
|
||||
return;
|
||||
}
|
||||
frameCache.onFrameRendered(presentationTimeUs);
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -566,6 +604,12 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
inputSwitcher.setInputDefaultBufferSize(width, height);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>The passed in {@link Bitmap} argument cannot be reused after this method returns {@code
|
||||
* true}, as it will be {@linkplain Bitmap#recycle recycled} by the processing pipeline.
|
||||
*/
|
||||
@Override
|
||||
public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) {
|
||||
checkState(!inputStreamEnded);
|
||||
@ -618,6 +662,33 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
return inputSwitcher.getInputSurface();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* <p>{@code DefaultVideoFrameProcessor} keeps track of the redraw requests received. If a call to
|
||||
* redraw is made when another redraw request is ongoing, the new request will be performed later
|
||||
* when the ongoing redraw completes, and this method will return immediately.
|
||||
*/
|
||||
@Override
|
||||
public void redraw() {
|
||||
if (frameCache == null) {
|
||||
throw new UnsupportedOperationException(
|
||||
"Replaying when enableReplayableCache is set to false");
|
||||
}
|
||||
// TODO: b/391109644 - Call listener method in VideoFrameMetadataListener and debounce
|
||||
// accordingly.
|
||||
if (frameCache.isEmpty()) {
|
||||
// Don't redraw right after flush, because the frame cache is also be flushed and it's empty.
|
||||
return;
|
||||
}
|
||||
videoFrameProcessingTaskExecutor.submit(
|
||||
() -> {
|
||||
finalShaderProgramWrapper.prepareToRedraw(
|
||||
castNonNull(frameCache).getReplayFramePresentationTimeUs());
|
||||
frameCache.replayFrame();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
@ -842,6 +913,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
Listener listener,
|
||||
GlObjectsProvider glObjectsProvider,
|
||||
boolean shouldReleaseGlObjectsProvider,
|
||||
boolean enableReplayableCache,
|
||||
@Nullable GlTextureProducer.Listener textureOutputListener,
|
||||
int textureOutputCapacity,
|
||||
boolean repeatLastRegisteredFrame,
|
||||
@ -849,8 +921,9 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
boolean experimentalRepeatInputBitmapWithoutResampling)
|
||||
throws GlUtil.GlException, VideoFrameProcessingException {
|
||||
EGLDisplay eglDisplay = getDefaultEglDisplay();
|
||||
boolean isOutputTransferHdr = ColorInfo.isTransferHdr(outputColorInfo);
|
||||
int[] configAttributes =
|
||||
ColorInfo.isTransferHdr(outputColorInfo)
|
||||
isOutputTransferHdr
|
||||
? GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_1010102
|
||||
: GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888;
|
||||
Pair<EGLContext, EGLSurface> eglContextAndPlaceholderSurface =
|
||||
@ -863,7 +936,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
.setHdrStaticInfo(null)
|
||||
.build();
|
||||
ColorInfo intermediateColorInfo =
|
||||
ColorInfo.isTransferHdr(outputColorInfo)
|
||||
isOutputTransferHdr
|
||||
? linearColorInfo
|
||||
: sdrWorkingColorSpace == WORKING_COLOR_SPACE_LINEAR
|
||||
? linearColorInfo
|
||||
@ -908,7 +981,10 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
finalShaderProgramWrapper,
|
||||
renderFramesAutomatically,
|
||||
outputColorInfo,
|
||||
debugViewProvider);
|
||||
debugViewProvider,
|
||||
enableReplayableCache
|
||||
? new ReplayableFrameCacheGlShaderProgram(context, /* useHdr= */ isOutputTransferHdr)
|
||||
: null);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -1031,18 +1107,25 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
|
||||
|
||||
if (forceReconfigure || !activeEffects.equals(inputStreamInfo.effects)) {
|
||||
if (!intermediateGlShaderPrograms.isEmpty()) {
|
||||
for (int i = 0; i < intermediateGlShaderPrograms.size(); i++) {
|
||||
// If frameCache is present, it's the first item in the list, skip releasing it.
|
||||
int startIndex = frameCache == null ? 0 : 1;
|
||||
for (int i = startIndex; i < intermediateGlShaderPrograms.size(); i++) {
|
||||
intermediateGlShaderPrograms.get(i).release();
|
||||
}
|
||||
intermediateGlShaderPrograms.clear();
|
||||
}
|
||||
|
||||
if (frameCache != null) {
|
||||
intermediateGlShaderPrograms.add(frameCache);
|
||||
}
|
||||
|
||||
ImmutableList.Builder<Effect> effectsListBuilder =
|
||||
new ImmutableList.Builder<Effect>().addAll(inputStreamInfo.effects);
|
||||
if (debugViewProvider != DebugViewProvider.NONE) {
|
||||
effectsListBuilder.add(new DebugViewEffect(debugViewProvider, outputColorInfo));
|
||||
}
|
||||
// The GlShaderPrograms that should be inserted in between InputSwitcher and
|
||||
|
||||
// The GlShaderPrograms that should be inserted in between the frame cache and
|
||||
// FinalShaderProgramWrapper.
|
||||
intermediateGlShaderPrograms.addAll(
|
||||
createGlShaderPrograms(
|
||||
|
@ -28,7 +28,6 @@ import android.opengl.EGLContext;
|
||||
import android.opengl.EGLDisplay;
|
||||
import android.opengl.EGLExt;
|
||||
import android.opengl.EGLSurface;
|
||||
import android.util.Pair;
|
||||
import android.view.Surface;
|
||||
import androidx.annotation.Nullable;
|
||||
import androidx.media3.common.C;
|
||||
@ -42,11 +41,12 @@ import androidx.media3.common.util.GlUtil;
|
||||
import androidx.media3.common.util.Log;
|
||||
import androidx.media3.common.util.LongArrayQueue;
|
||||
import androidx.media3.common.util.Size;
|
||||
import androidx.media3.common.util.Util;
|
||||
import androidx.media3.common.util.SystemClock;
|
||||
import androidx.media3.effect.DefaultVideoFrameProcessor.WorkingColorSpace;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Queue;
|
||||
import java.util.concurrent.ConcurrentLinkedQueue;
|
||||
import java.util.concurrent.Executor;
|
||||
@ -67,8 +67,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
*/
|
||||
/* package */ final class FinalShaderProgramWrapper implements GlShaderProgram, GlTextureProducer {
|
||||
|
||||
interface OnInputStreamProcessedListener {
|
||||
public interface Listener {
|
||||
/**
|
||||
* Called when one input stream is fully processed following {@link
|
||||
* #signalEndOfCurrentInputStream()}.
|
||||
*/
|
||||
void onInputStreamProcessed();
|
||||
|
||||
/** Called when a frame is rendered to the output surface. */
|
||||
void onFrameRendered(long presentationTimeUs);
|
||||
}
|
||||
|
||||
private static final String TAG = "FinalShaderWrapper";
|
||||
@ -86,7 +93,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
|
||||
private final Executor videoFrameProcessorListenerExecutor;
|
||||
private final VideoFrameProcessor.Listener videoFrameProcessorListener;
|
||||
private final Queue<Pair<GlTextureInfo, Long>> availableFrames;
|
||||
private final Queue<TimedGlTextureInfo> availableFrames;
|
||||
private final TexturePool outputTexturePool;
|
||||
private final LongArrayQueue outputTextureTimestamps; // Synchronized with outputTexturePool.
|
||||
private final LongArrayQueue syncObjects;
|
||||
@ -104,11 +111,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
private boolean isInputStreamEndedWithPendingAvailableFrames;
|
||||
private InputListener inputListener;
|
||||
private @MonotonicNonNull Size outputSizeBeforeSurfaceTransformation;
|
||||
@Nullable private OnInputStreamProcessedListener onInputStreamProcessedListener;
|
||||
private @MonotonicNonNull Listener listener;
|
||||
private boolean matrixTransformationsChanged;
|
||||
private boolean outputSurfaceInfoChanged;
|
||||
@Nullable private SurfaceInfo outputSurfaceInfo;
|
||||
|
||||
private long redrawFramePresentationTimeUs;
|
||||
|
||||
/** Wraps the {@link Surface} in {@link #outputSurfaceInfo}. */
|
||||
@Nullable private EGLSurface outputEglSurface;
|
||||
|
||||
@ -121,7 +130,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor,
|
||||
Executor videoFrameProcessorListenerExecutor,
|
||||
VideoFrameProcessor.Listener videoFrameProcessorListener,
|
||||
@Nullable Listener textureOutputListener,
|
||||
@Nullable GlTextureProducer.Listener textureOutputListener,
|
||||
int textureOutputCapacity,
|
||||
@WorkingColorSpace int sdrWorkingColorSpace,
|
||||
boolean renderFramesAutomatically) {
|
||||
@ -146,6 +155,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
outputTexturePool = new TexturePool(useHighPrecisionColorComponents, textureOutputCapacity);
|
||||
outputTextureTimestamps = new LongArrayQueue(textureOutputCapacity);
|
||||
syncObjects = new LongArrayQueue(textureOutputCapacity);
|
||||
redrawFramePresentationTimeUs = C.TIME_UNSET;
|
||||
}
|
||||
|
||||
// GlTextureProducer interface. Can be called on any thread.
|
||||
@ -189,17 +199,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
public void setOnInputStreamProcessedListener(
|
||||
@Nullable OnInputStreamProcessedListener onInputStreamProcessedListener) {
|
||||
public void setListener(Listener listener) {
|
||||
videoFrameProcessingTaskExecutor.verifyVideoFrameProcessingThread();
|
||||
this.onInputStreamProcessedListener = onInputStreamProcessedListener;
|
||||
this.listener = listener;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void signalEndOfCurrentInputStream() {
|
||||
videoFrameProcessingTaskExecutor.verifyVideoFrameProcessingThread();
|
||||
if (availableFrames.isEmpty()) {
|
||||
checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();
|
||||
checkNotNull(listener).onInputStreamProcessed();
|
||||
isInputStreamEndedWithPendingAvailableFrames = false;
|
||||
} else {
|
||||
checkState(!renderFramesAutomatically);
|
||||
@ -211,8 +220,15 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
public void queueInputFrame(
|
||||
GlObjectsProvider glObjectsProvider, GlTextureInfo inputTexture, long presentationTimeUs) {
|
||||
videoFrameProcessingTaskExecutor.verifyVideoFrameProcessingThread();
|
||||
videoFrameProcessorListenerExecutor.execute(
|
||||
() -> videoFrameProcessorListener.onOutputFrameAvailableForRendering(presentationTimeUs));
|
||||
|
||||
if (!isWaitingForRedrawFrame()) {
|
||||
// Don't report output available when redrawing - the redrawn frames are released immediately.
|
||||
videoFrameProcessorListenerExecutor.execute(
|
||||
() ->
|
||||
videoFrameProcessorListener.onOutputFrameAvailableForRendering(
|
||||
presentationTimeUs, /* isRedrawnFrame= */ false));
|
||||
}
|
||||
|
||||
if (textureOutputListener == null) {
|
||||
if (renderFramesAutomatically) {
|
||||
renderFrame(
|
||||
@ -221,7 +237,27 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
presentationTimeUs,
|
||||
/* renderTimeNs= */ presentationTimeUs * 1000);
|
||||
} else {
|
||||
availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
|
||||
availableFrames.add(new TimedGlTextureInfo(inputTexture, presentationTimeUs));
|
||||
if (isWaitingForRedrawFrame()) {
|
||||
if (presentationTimeUs == redrawFramePresentationTimeUs) {
|
||||
redrawFramePresentationTimeUs = C.TIME_UNSET;
|
||||
videoFrameProcessorListenerExecutor.execute(
|
||||
() ->
|
||||
videoFrameProcessorListener.onOutputFrameAvailableForRendering(
|
||||
presentationTimeUs, /* isRedrawnFrame= */ true));
|
||||
renderFrame(
|
||||
glObjectsProvider,
|
||||
inputTexture,
|
||||
presentationTimeUs,
|
||||
/* renderTimeNs= */ SystemClock.DEFAULT.nanoTime());
|
||||
availableFrames.clear();
|
||||
} else {
|
||||
// Skip other frames when waiting for the replay frame to arrive, so that the producer
|
||||
// can continue processing, but keep it in the availableFrames for the player to call
|
||||
// renderFrame.
|
||||
inputListener.onInputFrameProcessed(inputTexture);
|
||||
}
|
||||
}
|
||||
}
|
||||
inputListener.onReadyToAcceptInputFrame();
|
||||
} else {
|
||||
@ -307,14 +343,21 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
return;
|
||||
}
|
||||
checkState(!renderFramesAutomatically);
|
||||
Pair<GlTextureInfo, Long> oldestAvailableFrame = availableFrames.remove();
|
||||
if (availableFrames.isEmpty()) {
|
||||
// This only happens with redrawn frame. The available output frame notification on the player
|
||||
// side runs on another thread and when redrawing rapidly, the player could receive an output
|
||||
// frame from a previous redraw.
|
||||
return;
|
||||
}
|
||||
|
||||
TimedGlTextureInfo oldestAvailableFrame = availableFrames.remove();
|
||||
renderFrame(
|
||||
glObjectsProvider,
|
||||
/* inputTexture= */ oldestAvailableFrame.first,
|
||||
/* presentationTimeUs= */ oldestAvailableFrame.second,
|
||||
oldestAvailableFrame.glTextureInfo,
|
||||
oldestAvailableFrame.presentationTimeUs,
|
||||
renderTimeNs);
|
||||
if (availableFrames.isEmpty() && isInputStreamEndedWithPendingAvailableFrames) {
|
||||
checkNotNull(onInputStreamProcessedListener).onInputStreamProcessed();
|
||||
checkNotNull(listener).onInputStreamProcessed();
|
||||
isInputStreamEndedWithPendingAvailableFrames = false;
|
||||
}
|
||||
}
|
||||
@ -335,12 +378,20 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
}
|
||||
}
|
||||
|
||||
/* package */ void prepareToRedraw(long redrawFramePresentationTimeUs) {
|
||||
this.redrawFramePresentationTimeUs = redrawFramePresentationTimeUs;
|
||||
for (int i = 0; i < availableFrames.size(); i++) {
|
||||
TimedGlTextureInfo availableFrame = availableFrames.remove();
|
||||
inputListener.onInputFrameProcessed(availableFrame.glTextureInfo);
|
||||
}
|
||||
}
|
||||
|
||||
/** Must be called on the GL thread. */
|
||||
private void setOutputSurfaceInfoInternal(@Nullable SurfaceInfo outputSurfaceInfo) {
|
||||
if (textureOutputListener != null) {
|
||||
return;
|
||||
}
|
||||
if (Util.areEqual(this.outputSurfaceInfo, outputSurfaceInfo)) {
|
||||
if (Objects.equals(this.outputSurfaceInfo, outputSurfaceInfo)) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -389,6 +440,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isWaitingForRedrawFrame() {
|
||||
return redrawFramePresentationTimeUs != C.TIME_UNSET;
|
||||
}
|
||||
|
||||
private void renderFrame(
|
||||
GlObjectsProvider glObjectsProvider,
|
||||
GlTextureInfo inputTexture,
|
||||
@ -396,8 +451,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
long renderTimeNs) {
|
||||
try {
|
||||
if (renderTimeNs == VideoFrameProcessor.DROP_OUTPUT_FRAME
|
||||
|| !ensureConfigured(glObjectsProvider, inputTexture.width, inputTexture.height)) {
|
||||
|| !ensureConfigured(glObjectsProvider, inputTexture.width, inputTexture.height)
|
||||
|| (isWaitingForRedrawFrame() && presentationTimeUs != redrawFramePresentationTimeUs)) {
|
||||
inputListener.onInputFrameProcessed(inputTexture);
|
||||
if (renderTimeNs == VideoFrameProcessor.DROP_OUTPUT_FRAME) {
|
||||
checkNotNull(listener).onFrameRendered(presentationTimeUs);
|
||||
}
|
||||
return; // Drop frames when requested, or there is no output surface and output texture.
|
||||
}
|
||||
if (outputSurfaceInfo != null) {
|
||||
@ -443,6 +502,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
|
||||
EGLExt.eglPresentationTimeANDROID(eglDisplay, outputEglSurface, eglPresentationTimeNs);
|
||||
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
|
||||
checkNotNull(listener).onFrameRendered(presentationTimeUs);
|
||||
DebugTraceUtil.logEvent(COMPONENT_VFP, EVENT_RENDERED_TO_OUTPUT_SURFACE, presentationTimeUs);
|
||||
}
|
||||
|
||||
@ -479,7 +539,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
this.inputHeight = inputHeight;
|
||||
Size outputSizeBeforeSurfaceTransformation =
|
||||
MatrixUtils.configureAndGetOutputSize(inputWidth, inputHeight, matrixTransformations);
|
||||
if (!Util.areEqual(
|
||||
if (!Objects.equals(
|
||||
this.outputSizeBeforeSurfaceTransformation, outputSizeBeforeSurfaceTransformation)) {
|
||||
this.outputSizeBeforeSurfaceTransformation = outputSizeBeforeSurfaceTransformation;
|
||||
videoFrameProcessorListenerExecutor.execute(
|
||||
|
@ -15,7 +15,6 @@
|
||||
*/
|
||||
package androidx.media3.effect;
|
||||
|
||||
import android.util.Pair;
|
||||
import androidx.annotation.GuardedBy;
|
||||
import androidx.annotation.Nullable;
|
||||
import androidx.media3.common.C;
|
||||
@ -39,7 +38,7 @@ import java.util.Queue;
|
||||
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;
|
||||
|
||||
@GuardedBy("this")
|
||||
private final Queue<Pair<GlTextureInfo, Long>> availableFrames;
|
||||
private final Queue<TimedGlTextureInfo> availableFrames;
|
||||
|
||||
@GuardedBy("this")
|
||||
private int consumingGlShaderProgramInputCapacity;
|
||||
@ -63,7 +62,7 @@ import java.util.Queue;
|
||||
|
||||
@Override
|
||||
public synchronized void onReadyToAcceptInputFrame() {
|
||||
@Nullable Pair<GlTextureInfo, Long> pendingFrame = availableFrames.poll();
|
||||
@Nullable TimedGlTextureInfo pendingFrame = availableFrames.poll();
|
||||
if (pendingFrame == null) {
|
||||
consumingGlShaderProgramInputCapacity++;
|
||||
return;
|
||||
@ -72,11 +71,9 @@ import java.util.Queue;
|
||||
videoFrameProcessingTaskExecutor.submit(
|
||||
() ->
|
||||
consumingGlShaderProgram.queueInputFrame(
|
||||
glObjectsProvider,
|
||||
/* inputTexture= */ pendingFrame.first,
|
||||
/* presentationTimeUs= */ pendingFrame.second));
|
||||
@Nullable Pair<GlTextureInfo, Long> nextPendingFrame = availableFrames.peek();
|
||||
if (nextPendingFrame != null && nextPendingFrame.second == C.TIME_END_OF_SOURCE) {
|
||||
glObjectsProvider, pendingFrame.glTextureInfo, pendingFrame.presentationTimeUs));
|
||||
@Nullable TimedGlTextureInfo nextPendingFrame = availableFrames.peek();
|
||||
if (nextPendingFrame != null && nextPendingFrame.presentationTimeUs == C.TIME_END_OF_SOURCE) {
|
||||
videoFrameProcessingTaskExecutor.submit(
|
||||
consumingGlShaderProgram::signalEndOfCurrentInputStream);
|
||||
availableFrames.remove();
|
||||
@ -97,7 +94,7 @@ import java.util.Queue;
|
||||
glObjectsProvider, inputTexture, presentationTimeUs));
|
||||
consumingGlShaderProgramInputCapacity--;
|
||||
} else {
|
||||
availableFrames.add(Pair.create(inputTexture, presentationTimeUs));
|
||||
availableFrames.add(new TimedGlTextureInfo(inputTexture, presentationTimeUs));
|
||||
}
|
||||
}
|
||||
|
||||
@ -107,7 +104,7 @@ import java.util.Queue;
|
||||
*/
|
||||
public synchronized void signalEndOfCurrentStream() {
|
||||
if (!availableFrames.isEmpty()) {
|
||||
availableFrames.add(Pair.create(GlTextureInfo.UNSET, C.TIME_END_OF_SOURCE));
|
||||
availableFrames.add(new TimedGlTextureInfo(GlTextureInfo.UNSET, C.TIME_END_OF_SOURCE));
|
||||
} else {
|
||||
videoFrameProcessingTaskExecutor.submit(
|
||||
consumingGlShaderProgram::signalEndOfCurrentInputStream);
|
||||
|
@ -43,8 +43,9 @@ public final class LanczosResample implements GlEffect {
|
||||
private static final float NO_OP_THRESHOLD = 0.01f;
|
||||
|
||||
private final float radius;
|
||||
private final int width;
|
||||
private final int height;
|
||||
private final int longSide;
|
||||
private final int shortSide;
|
||||
private final boolean assumeLandscapeOrientation;
|
||||
|
||||
/**
|
||||
* Creates an instance.
|
||||
@ -56,20 +57,56 @@ public final class LanczosResample implements GlEffect {
|
||||
@IntRange(from = 1) int width, @IntRange(from = 1) int height) {
|
||||
checkArgument(width > 0);
|
||||
checkArgument(height > 0);
|
||||
return new LanczosResample(DEFAULT_RADIUS, width, height);
|
||||
return new LanczosResample(
|
||||
DEFAULT_RADIUS, width, height, /* assumeLandscapeOrientation= */ true);
|
||||
}
|
||||
|
||||
private LanczosResample(float radius, int width, int height) {
|
||||
/**
|
||||
* Creates an instance.
|
||||
*
|
||||
* <p>The output resolution will be either {@code firstDimension} x {@code secondDimension} or
|
||||
* {@code secondDimension} x {@code firstDimension}. The longer of {@code firstDimension} or
|
||||
* {@code secondDimension} will have the same orientation as the longer side of the {@link Size}
|
||||
* passed in to {@link LanczosResampleScaledFunctionProvider#configure}.
|
||||
*
|
||||
* @param firstDimension The first dimension of the output contents.
|
||||
* @param secondDimension The second dimension of the output contents.
|
||||
*/
|
||||
public static LanczosResample scaleToFitWithFlexibleOrientation(
|
||||
@IntRange(from = 1) int firstDimension, @IntRange(from = 1) int secondDimension) {
|
||||
checkArgument(firstDimension > 0);
|
||||
checkArgument(secondDimension > 0);
|
||||
if (firstDimension > secondDimension) {
|
||||
return new LanczosResample(
|
||||
DEFAULT_RADIUS,
|
||||
/* longSide= */ firstDimension,
|
||||
/* shortSide= */ secondDimension,
|
||||
/* assumeLandscapeOrientation= */ false);
|
||||
} else {
|
||||
return new LanczosResample(
|
||||
DEFAULT_RADIUS,
|
||||
/* longSide= */ secondDimension,
|
||||
/* shortSide= */ firstDimension,
|
||||
/* assumeLandscapeOrientation= */ false);
|
||||
}
|
||||
}
|
||||
|
||||
private LanczosResample(
|
||||
float radius, int longSide, int shortSide, boolean assumeLandscapeOrientation) {
|
||||
this.radius = radius;
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.longSide = longSide;
|
||||
this.shortSide = shortSide;
|
||||
this.assumeLandscapeOrientation = assumeLandscapeOrientation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GlShaderProgram toGlShaderProgram(Context context, boolean useHdr)
|
||||
throws VideoFrameProcessingException {
|
||||
return new SeparableConvolutionShaderProgram(
|
||||
context, useHdr, new LanczosResampleScaledFunctionProvider(radius, width, height));
|
||||
context,
|
||||
useHdr,
|
||||
new LanczosResampleScaledFunctionProvider(
|
||||
radius, longSide, shortSide, assumeLandscapeOrientation));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -80,7 +117,13 @@ public final class LanczosResample implements GlEffect {
|
||||
*/
|
||||
@Override
|
||||
public boolean isNoOp(int inputWidth, int inputHeight) {
|
||||
return abs(scalingFactorToFit(inputWidth, inputHeight, width, height) - 1f) < NO_OP_THRESHOLD;
|
||||
Size targetSize =
|
||||
getTargetSize(inputWidth, inputHeight, longSide, shortSide, assumeLandscapeOrientation);
|
||||
return abs(
|
||||
scalingFactorToFit(
|
||||
inputWidth, inputHeight, targetSize.getWidth(), targetSize.getHeight())
|
||||
- 1f)
|
||||
< NO_OP_THRESHOLD;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -108,21 +151,24 @@ public final class LanczosResample implements GlEffect {
|
||||
// Note: We deliberately don't use Float.MIN_VALUE because it's positive & very close to zero.
|
||||
private static final float SCALE_UNSET = -Float.MAX_VALUE;
|
||||
private final float radius;
|
||||
private final int width;
|
||||
private final int height;
|
||||
private final int longSide;
|
||||
private final int shortSide;
|
||||
private final boolean assumeLandscapeOrientation;
|
||||
|
||||
private float scale;
|
||||
|
||||
private LanczosResampleScaledFunctionProvider(
|
||||
@FloatRange(from = 0, fromInclusive = false) float radius,
|
||||
@IntRange(from = 1) int width,
|
||||
@IntRange(from = 1) int height) {
|
||||
@IntRange(from = 1) int longSide,
|
||||
@IntRange(from = 1) int shortSide,
|
||||
boolean assumeLandscapeOrientation) {
|
||||
checkArgument(radius > 0);
|
||||
checkArgument(width > 0);
|
||||
checkArgument(height > 0);
|
||||
checkArgument(longSide > 0);
|
||||
checkArgument(shortSide > 0);
|
||||
this.radius = radius;
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.longSide = longSide;
|
||||
this.shortSide = shortSide;
|
||||
this.assumeLandscapeOrientation = assumeLandscapeOrientation;
|
||||
scale = SCALE_UNSET;
|
||||
}
|
||||
|
||||
@ -136,8 +182,33 @@ public final class LanczosResample implements GlEffect {
|
||||
|
||||
@Override
|
||||
public Size configure(Size inputSize) {
|
||||
scale = scalingFactorToFit(inputSize.getWidth(), inputSize.getHeight(), width, height);
|
||||
Size targetSize =
|
||||
LanczosResample.getTargetSize(
|
||||
inputSize.getWidth(),
|
||||
inputSize.getHeight(),
|
||||
longSide,
|
||||
shortSide,
|
||||
assumeLandscapeOrientation);
|
||||
scale =
|
||||
scalingFactorToFit(
|
||||
inputSize.getWidth(),
|
||||
inputSize.getHeight(),
|
||||
targetSize.getWidth(),
|
||||
targetSize.getHeight());
|
||||
return new Size(round(inputSize.getWidth() * scale), round(inputSize.getHeight() * scale));
|
||||
}
|
||||
}
|
||||
|
||||
private static Size getTargetSize(
|
||||
int inputWidth,
|
||||
int inputHeight,
|
||||
int longSide,
|
||||
int shortSide,
|
||||
boolean assumeLandscapeOrientation) {
|
||||
if (assumeLandscapeOrientation || inputWidth > inputHeight) {
|
||||
return new Size(longSide, shortSide);
|
||||
} else {
|
||||
return new Size(shortSide, longSide);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -31,10 +31,12 @@ import static androidx.media3.effect.DebugTraceUtil.EVENT_OUTPUT_TEXTURE_RENDERE
|
||||
import static java.util.concurrent.TimeUnit.MILLISECONDS;
|
||||
|
||||
import android.content.Context;
|
||||
import android.graphics.Bitmap;
|
||||
import android.opengl.EGLContext;
|
||||
import android.opengl.EGLDisplay;
|
||||
import android.opengl.EGLSurface;
|
||||
import android.util.SparseArray;
|
||||
import android.view.Surface;
|
||||
import androidx.annotation.IntRange;
|
||||
import androidx.annotation.Nullable;
|
||||
import androidx.media3.common.C;
|
||||
@ -44,6 +46,7 @@ import androidx.media3.common.Effect;
|
||||
import androidx.media3.common.Format;
|
||||
import androidx.media3.common.GlObjectsProvider;
|
||||
import androidx.media3.common.GlTextureInfo;
|
||||
import androidx.media3.common.OnInputFrameProcessedListener;
|
||||
import androidx.media3.common.SurfaceInfo;
|
||||
import androidx.media3.common.VideoCompositorSettings;
|
||||
import androidx.media3.common.VideoFrameProcessingException;
|
||||
@ -51,6 +54,7 @@ import androidx.media3.common.VideoFrameProcessor;
|
||||
import androidx.media3.common.VideoGraph;
|
||||
import androidx.media3.common.util.GlUtil.GlException;
|
||||
import androidx.media3.common.util.Log;
|
||||
import androidx.media3.common.util.TimestampIterator;
|
||||
import androidx.media3.common.util.UnstableApi;
|
||||
import com.google.common.util.concurrent.MoreExecutors;
|
||||
import java.util.ArrayDeque;
|
||||
@ -64,7 +68,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||
|
||||
/** A {@link VideoGraph} that handles multiple input streams. */
|
||||
@UnstableApi
|
||||
public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
public final class MultipleInputVideoGraph implements VideoGraph {
|
||||
|
||||
private static final String TAG = "MultiInputVG";
|
||||
private static final String SHARED_EXECUTOR_NAME = "Effect:MultipleInputVideoGraph:Thread";
|
||||
@ -87,10 +91,9 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
private final ExecutorService sharedExecutorService;
|
||||
|
||||
private final DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory;
|
||||
private final Queue<CompositorOutputTextureInfo> compositorOutputTextures;
|
||||
private final Queue<TimedGlTextureInfo> compositorOutputTextures;
|
||||
private final SparseArray<CompositorOutputTextureRelease> compositorOutputTextureReleases;
|
||||
|
||||
private final long initialTimestampOffsetUs;
|
||||
private final boolean renderFramesAutomatically;
|
||||
|
||||
@Nullable private VideoFrameProcessor compositionVideoFrameProcessor;
|
||||
@ -104,7 +107,52 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
|
||||
private volatile boolean hasProducedFrameWithTimestampZero;
|
||||
|
||||
protected MultipleInputVideoGraph(
|
||||
/** A {@link VideoGraph.Factory} for {@link MultipleInputVideoGraph}. */
|
||||
public static final class Factory implements VideoGraph.Factory {
|
||||
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
|
||||
|
||||
/**
|
||||
* A {@code Factory} for {@link MultipleInputVideoGraph} that uses a {@link
|
||||
* DefaultVideoFrameProcessor.Factory}.
|
||||
*/
|
||||
public Factory() {
|
||||
this(new DefaultVideoFrameProcessor.Factory.Builder().build());
|
||||
}
|
||||
|
||||
public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
|
||||
this.videoFrameProcessorFactory = videoFrameProcessorFactory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MultipleInputVideoGraph create(
|
||||
Context context,
|
||||
ColorInfo outputColorInfo,
|
||||
DebugViewProvider debugViewProvider,
|
||||
Listener listener,
|
||||
Executor listenerExecutor,
|
||||
VideoCompositorSettings videoCompositorSettings,
|
||||
List<Effect> compositionEffects,
|
||||
long initialTimestampOffsetUs,
|
||||
boolean renderFramesAutomatically) {
|
||||
return new MultipleInputVideoGraph(
|
||||
context,
|
||||
videoFrameProcessorFactory,
|
||||
outputColorInfo,
|
||||
debugViewProvider,
|
||||
listener,
|
||||
listenerExecutor,
|
||||
videoCompositorSettings,
|
||||
compositionEffects,
|
||||
renderFramesAutomatically);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supportsMultipleInputs() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
private MultipleInputVideoGraph(
|
||||
Context context,
|
||||
VideoFrameProcessor.Factory videoFrameProcessorFactory,
|
||||
ColorInfo outputColorInfo,
|
||||
@ -113,7 +161,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
Executor listenerExecutor,
|
||||
VideoCompositorSettings videoCompositorSettings,
|
||||
List<Effect> compositionEffects,
|
||||
long initialTimestampOffsetUs,
|
||||
boolean renderFramesAutomatically) {
|
||||
checkArgument(videoFrameProcessorFactory instanceof DefaultVideoFrameProcessor.Factory);
|
||||
this.context = context;
|
||||
@ -123,7 +170,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
this.listenerExecutor = listenerExecutor;
|
||||
this.videoCompositorSettings = videoCompositorSettings;
|
||||
this.compositionEffects = new ArrayList<>(compositionEffects);
|
||||
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
|
||||
this.renderFramesAutomatically = renderFramesAutomatically;
|
||||
lastRenderedPresentationTimeUs = C.TIME_UNSET;
|
||||
preProcessors = new SparseArray<>();
|
||||
@ -183,14 +229,17 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
|
||||
public void onOutputFrameAvailableForRendering(
|
||||
long presentationTimeUs, boolean isRedrawnFrame) {
|
||||
if (presentationTimeUs == 0) {
|
||||
hasProducedFrameWithTimestampZero = true;
|
||||
}
|
||||
lastRenderedPresentationTimeUs = presentationTimeUs;
|
||||
|
||||
listenerExecutor.execute(
|
||||
() -> listener.onOutputFrameAvailableForRendering(presentationTimeUs));
|
||||
() ->
|
||||
listener.onOutputFrameAvailableForRendering(
|
||||
presentationTimeUs, isRedrawnFrame));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -268,12 +317,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
preProcessors.put(inputIndex, preProcessor);
|
||||
}
|
||||
|
||||
@Override
|
||||
public VideoFrameProcessor getProcessor(int inputIndex) {
|
||||
checkState(contains(preProcessors, inputIndex));
|
||||
return preProcessors.get(inputIndex);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
|
||||
checkNotNull(compositionVideoFrameProcessor).setOutputSurfaceInfo(outputSurfaceInfo);
|
||||
@ -284,6 +327,75 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
return hasProducedFrameWithTimestampZero;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean queueInputBitmap(
|
||||
int inputIndex, Bitmap inputBitmap, TimestampIterator timestampIterator) {
|
||||
return getProcessor(inputIndex).queueInputBitmap(inputBitmap, timestampIterator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean queueInputTexture(int inputIndex, int textureId, long presentationTimeUs) {
|
||||
return getProcessor(inputIndex).queueInputTexture(textureId, presentationTimeUs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setOnInputFrameProcessedListener(
|
||||
int inputIndex, OnInputFrameProcessedListener listener) {
|
||||
getProcessor(inputIndex).setOnInputFrameProcessedListener(listener);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setOnInputSurfaceReadyListener(int inputIndex, Runnable listener) {
|
||||
getProcessor(inputIndex).setOnInputSurfaceReadyListener(listener);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Surface getInputSurface(int inputIndex) {
|
||||
return getProcessor(inputIndex).getInputSurface();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void registerInputStream(
|
||||
int inputIndex,
|
||||
@VideoFrameProcessor.InputType int inputType,
|
||||
Format format,
|
||||
List<Effect> effects,
|
||||
long offsetToAddUs) {
|
||||
getProcessor(inputIndex).registerInputStream(inputType, format, effects, offsetToAddUs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean registerInputFrame(int inputIndex) {
|
||||
return getProcessor(inputIndex).registerInputFrame();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getPendingInputFrameCount(int inputIndex) {
|
||||
return getProcessor(inputIndex).getPendingInputFrameCount();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void renderOutputFrame(long renderTimeNs) {
|
||||
checkNotNull(compositionVideoFrameProcessor).renderOutputFrame(renderTimeNs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void redraw() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush() {
|
||||
for (int i = 0; i < preProcessors.size(); i++) {
|
||||
preProcessors.get(preProcessors.keyAt(i)).flush();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void signalEndOfInput(int inputIndex) {
|
||||
getProcessor(inputIndex).signalEndOfInput();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
if (released) {
|
||||
@ -327,12 +439,9 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
released = true;
|
||||
}
|
||||
|
||||
protected VideoFrameProcessor getCompositionVideoFrameProcessor() {
|
||||
return checkStateNotNull(compositionVideoFrameProcessor);
|
||||
}
|
||||
|
||||
protected long getInitialTimestampOffsetUs() {
|
||||
return initialTimestampOffsetUs;
|
||||
private VideoFrameProcessor getProcessor(int inputIndex) {
|
||||
checkState(contains(preProcessors, inputIndex));
|
||||
return preProcessors.get(inputIndex);
|
||||
}
|
||||
|
||||
// This method is called on the sharedExecutorService.
|
||||
@ -363,8 +472,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
DebugTraceUtil.logEvent(
|
||||
COMPONENT_COMPOSITOR, EVENT_OUTPUT_TEXTURE_RENDERED, presentationTimeUs);
|
||||
|
||||
compositorOutputTextures.add(
|
||||
new CompositorOutputTextureInfo(outputTexture, presentationTimeUs));
|
||||
compositorOutputTextures.add(new TimedGlTextureInfo(outputTexture, presentationTimeUs));
|
||||
compositorOutputTextureReleases.put(
|
||||
outputTexture.texId,
|
||||
new CompositorOutputTextureRelease(textureProducer, presentationTimeUs));
|
||||
@ -421,7 +529,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
return;
|
||||
}
|
||||
|
||||
@Nullable CompositorOutputTextureInfo outputTexture = compositorOutputTextures.peek();
|
||||
@Nullable TimedGlTextureInfo outputTexture = compositorOutputTextures.peek();
|
||||
if (outputTexture == null) {
|
||||
return;
|
||||
}
|
||||
@ -446,16 +554,6 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
|
||||
: VideoFrameProcessingException.from(e)));
|
||||
}
|
||||
|
||||
private static final class CompositorOutputTextureInfo {
|
||||
public final GlTextureInfo glTextureInfo;
|
||||
public final long presentationTimeUs;
|
||||
|
||||
private CompositorOutputTextureInfo(GlTextureInfo glTextureInfo, long presentationTimeUs) {
|
||||
this.glTextureInfo = glTextureInfo;
|
||||
this.presentationTimeUs = presentationTimeUs;
|
||||
}
|
||||
}
|
||||
|
||||
private static final class CompositorOutputTextureRelease {
|
 private final GlTextureProducer textureProducer;
 private final long presentationTimeUs;

@@ -131,7 +131,8 @@ public final class Presentation implements MatrixTransformation {
         /* height= */ C.LENGTH_UNSET,
         aspectRatio,
         layout,
-        TEXTURE_MIN_FILTER_LINEAR);
+        TEXTURE_MIN_FILTER_LINEAR,
+        /* preservePortraitWhenApplicable= */ false);
   }

   /**
@@ -148,7 +149,8 @@ public final class Presentation implements MatrixTransformation {
         height,
         ASPECT_RATIO_UNSET,
         LAYOUT_SCALE_TO_FIT,
-        TEXTURE_MIN_FILTER_LINEAR);
+        TEXTURE_MIN_FILTER_LINEAR,
+        /* preservePortraitWhenApplicable= */ false);
   }

   /**
@@ -166,7 +168,33 @@ public final class Presentation implements MatrixTransformation {
     checkArgument(width > 0, "width " + width + " must be positive");
     checkArgument(height > 0, "height " + height + " must be positive");
     checkLayout(layout);
-    return new Presentation(width, height, ASPECT_RATIO_UNSET, layout, TEXTURE_MIN_FILTER_LINEAR);
+    return new Presentation(
+        width,
+        height,
+        ASPECT_RATIO_UNSET,
+        layout,
+        TEXTURE_MIN_FILTER_LINEAR,
+        /* preservePortraitWhenApplicable= */ false);
   }

+  /**
+   * Creates a new {@link Presentation} instance.
+   *
+   * <p>The output frame will have a short side matching the given value. The longer side will
+   * scale to preserve the input aspect ratio. For example, passing a shortSide of 480 will scale
+   * a 1440x1920 video to 480x640 or a 1920x1440 video to 640x480.
+   *
+   * @param shortSide The length of the short side of the output frame, in pixels.
+   */
+  public static Presentation createForShortSide(int shortSide) {
+    checkArgument(shortSide > 0, "shortSide " + shortSide + " must be positive");
+    return new Presentation(
+        /* width= */ C.LENGTH_UNSET,
+        /* height= */ shortSide,
+        ASPECT_RATIO_UNSET,
+        LAYOUT_SCALE_TO_FIT,
+        TEXTURE_MIN_FILTER_LINEAR,
+        /* preservePortraitWhenApplicable= */ true);
+  }
+
   private final int requestedWidthPixels;
@@ -174,6 +202,7 @@ public final class Presentation implements MatrixTransformation {
   private float requestedAspectRatio;
   private final @Layout int layout;
   private final @C.TextureMinFilter int textureMinFilter;
+  private final boolean preservePortraitWhenApplicable;

   private float outputWidth;
   private float outputHeight;
@@ -184,7 +213,8 @@ public final class Presentation implements MatrixTransformation {
       int height,
       float aspectRatio,
       @Layout int layout,
-      @C.TextureMinFilter int textureMinFilter) {
+      @C.TextureMinFilter int textureMinFilter,
+      boolean preservePortraitWhenApplicable) {
     checkArgument(
         (aspectRatio == ASPECT_RATIO_UNSET) || (width == C.LENGTH_UNSET),
         "width and aspect ratio should not both be set");
@@ -194,6 +224,7 @@ public final class Presentation implements MatrixTransformation {
     this.requestedAspectRatio = aspectRatio;
     this.layout = layout;
     this.textureMinFilter = textureMinFilter;
+    this.preservePortraitWhenApplicable = preservePortraitWhenApplicable;

     outputWidth = C.LENGTH_UNSET;
     outputHeight = C.LENGTH_UNSET;
@@ -214,7 +245,8 @@ public final class Presentation implements MatrixTransformation {
         requestedHeightPixels,
         requestedAspectRatio,
         layout,
-        textureMinFilter);
+        textureMinFilter,
+        preservePortraitWhenApplicable);
   }

   @Override
@@ -243,10 +275,15 @@ public final class Presentation implements MatrixTransformation {
     if (requestedHeightPixels != C.LENGTH_UNSET) {
       if (requestedWidthPixels != C.LENGTH_UNSET) {
         outputWidth = requestedWidthPixels;
+        outputHeight = requestedHeightPixels;
+      } else if (preservePortraitWhenApplicable && inputHeight > inputWidth) {
+        // Swap width and height if the input orientation should be respected.
+        outputHeight = requestedHeightPixels * outputHeight / outputWidth;
+        outputWidth = requestedHeightPixels;
       } else {
         outputWidth = requestedHeightPixels * outputWidth / outputHeight;
+        outputHeight = requestedHeightPixels;
       }
-      outputHeight = requestedHeightPixels;
     }
     return new Size(Math.round(outputWidth), Math.round(outputHeight));
   }
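For context, a minimal usage sketch of the new factory method (not part of this diff; the class name CreateForShortSideDemo is illustrative, and package paths are assumed from the media3 effect and common modules):

import androidx.media3.common.util.Size;
import androidx.media3.effect.Presentation;

/** Demonstrates the short-side scaling described in the createForShortSide javadoc. */
final class CreateForShortSideDemo {
  public static void main(String[] args) {
    Presentation presentation = Presentation.createForShortSide(480);

    // Portrait input: the width is the short side, so it becomes 480 and the
    // height scales to keep the aspect ratio: 480 * 1920 / 1440 = 640.
    Size portrait = presentation.configure(/* inputWidth= */ 1440, /* inputHeight= */ 1920);
    System.out.println(portrait.getWidth() + "x" + portrait.getHeight()); // 480x640

    // Landscape input: the height is the short side, so it becomes 480 and the
    // width scales the same way: 480 * 1920 / 1440 = 640.
    Size landscape = presentation.configure(/* inputWidth= */ 1920, /* inputHeight= */ 1440);
    System.out.println(landscape.getWidth() + "x" + landscape.getHeight()); // 640x480
  }
}

The preservePortraitWhenApplicable flag is what routes portrait input into the swapped branch of configure() above; the three pre-existing factory methods pass false, so their behavior is unchanged.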
@@ -1,104 +0,0 @@
-/*
- * Copyright 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package androidx.media3.effect;
-
-import android.content.Context;
-import androidx.media3.common.ColorInfo;
-import androidx.media3.common.DebugViewProvider;
-import androidx.media3.common.Effect;
-import androidx.media3.common.PreviewingVideoGraph;
-import androidx.media3.common.VideoCompositorSettings;
-import androidx.media3.common.VideoFrameProcessor;
-import androidx.media3.common.util.UnstableApi;
-import java.util.List;
-import java.util.concurrent.Executor;
-
-/**
- * A {@linkplain PreviewingVideoGraph previewing} specific implementation of {@link
- * MultipleInputVideoGraph}.
- */
-@UnstableApi
-public final class PreviewingMultipleInputVideoGraph extends MultipleInputVideoGraph
-    implements PreviewingVideoGraph {
-
-  /** A factory for creating a {@link PreviewingMultipleInputVideoGraph}. */
-  public static final class Factory implements PreviewingVideoGraph.Factory {
-    private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
-
-    /**
-     * Creates a new factory that uses the {@link DefaultVideoFrameProcessor.Factory} with its
-     * default values.
-     */
-    public Factory() {
-      videoFrameProcessorFactory = new DefaultVideoFrameProcessor.Factory.Builder().build();
-    }
-
-    @Override
-    public PreviewingVideoGraph create(
-        Context context,
-        ColorInfo outputColorInfo,
-        DebugViewProvider debugViewProvider,
-        Listener listener,
-        Executor listenerExecutor,
-        VideoCompositorSettings videoCompositorSettings,
-        List<Effect> compositionEffects,
-        long initialTimestampOffsetUs) {
-      return new PreviewingMultipleInputVideoGraph(
-          context,
-          videoFrameProcessorFactory,
-          outputColorInfo,
-          debugViewProvider,
-          listener,
-          listenerExecutor,
-          videoCompositorSettings,
-          compositionEffects,
-          initialTimestampOffsetUs);
-    }
-
-    @Override
-    public boolean supportsMultipleInputs() {
-      return true;
-    }
-  }
-
-  private PreviewingMultipleInputVideoGraph(
-      Context context,
-      VideoFrameProcessor.Factory videoFrameProcessorFactory,
-      ColorInfo outputColorInfo,
-      DebugViewProvider debugViewProvider,
-      Listener listener,
-      Executor listenerExecutor,
-      VideoCompositorSettings videoCompositorSettings,
-      List<Effect> compositionEffects,
-      long initialTimestampOffsetUs) {
-    super(
-        context,
-        videoFrameProcessorFactory,
-        outputColorInfo,
-        debugViewProvider,
-        listener,
-        listenerExecutor,
-        videoCompositorSettings,
-        compositionEffects,
-        initialTimestampOffsetUs,
-        /* renderFramesAutomatically= */ false);
-  }
-
-  @Override
-  public void renderOutputFrame(long renderTimeNs) {
-    getCompositionVideoFrameProcessor().renderOutputFrame(renderTimeNs);
-  }
-}
@@ -1,110 +0,0 @@
-/*
- * Copyright 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package androidx.media3.effect;
-
-import android.content.Context;
-import androidx.media3.common.ColorInfo;
-import androidx.media3.common.DebugViewProvider;
-import androidx.media3.common.Effect;
-import androidx.media3.common.PreviewingVideoGraph;
-import androidx.media3.common.VideoCompositorSettings;
-import androidx.media3.common.VideoFrameProcessor;
-import androidx.media3.common.util.UnstableApi;
-import java.util.List;
-import java.util.concurrent.Executor;
-
-/**
- * A {@link PreviewingVideoGraph Previewing} specific implementation of {@link
- * SingleInputVideoGraph}.
- */
-@UnstableApi
-public final class PreviewingSingleInputVideoGraph extends SingleInputVideoGraph
-    implements PreviewingVideoGraph {
-
-  /** A factory for creating a {@link PreviewingSingleInputVideoGraph}. */
-  public static final class Factory implements PreviewingVideoGraph.Factory {
-
-    private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
-
-    /**
-     * Creates a new factory that uses the {@link DefaultVideoFrameProcessor.Factory} with its
-     * default values.
-     */
-    public Factory() {
-      this(new DefaultVideoFrameProcessor.Factory.Builder().build());
-    }
-
-    /**
-     * Creates an instance that uses the supplied {@code videoFrameProcessorFactory} to create
-     * {@link VideoFrameProcessor} instances.
-     */
-    public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
-      this.videoFrameProcessorFactory = videoFrameProcessorFactory;
-    }
-
-    @Override
-    public PreviewingVideoGraph create(
-        Context context,
-        ColorInfo outputColorInfo,
-        DebugViewProvider debugViewProvider,
-        Listener listener,
-        Executor listenerExecutor,
-        VideoCompositorSettings videoCompositorSettings,
-        List<Effect> compositionEffects,
-        long initialTimestampOffsetUs) {
-      return new PreviewingSingleInputVideoGraph(
-          context,
-          videoFrameProcessorFactory,
-          outputColorInfo,
-          debugViewProvider,
-          listener,
-          listenerExecutor,
-          initialTimestampOffsetUs);
-    }
-
-    @Override
-    public boolean supportsMultipleInputs() {
-      return false;
-    }
-  }
-
-  private PreviewingSingleInputVideoGraph(
-      Context context,
-      VideoFrameProcessor.Factory videoFrameProcessorFactory,
-      ColorInfo outputColorInfo,
-      DebugViewProvider debugViewProvider,
-      Listener listener,
-      Executor listenerExecutor,
-      long initialTimestampOffsetUs) {
-    super(
-        context,
-        videoFrameProcessorFactory,
-        outputColorInfo,
-        listener,
-        debugViewProvider,
-        listenerExecutor,
-        VideoCompositorSettings.DEFAULT,
-        // Previewing needs frame render timing.
-        /* renderFramesAutomatically= */ false,
-        initialTimestampOffsetUs);
-  }
-
-  @Override
-  public void renderOutputFrame(long renderTimeNs) {
-    getProcessor(getInputIndex()).renderOutputFrame(renderTimeNs);
-  }
-}
@@ -142,7 +142,7 @@ import java.util.concurrent.TimeUnit;

   private final ConcurrentEffect<T> concurrentEffect;
   private final TexturePool outputTexturePool;
-  private final Queue<TimedTextureInfo<T>> frameQueue;
+  private final Queue<QueuedFrame<T>> frameQueue;
   private InputListener inputListener;
   private OutputListener outputListener;
   private ErrorListener errorListener;
@@ -226,7 +226,8 @@ import java.util.concurrent.TimeUnit;

     Future<T> task =
         concurrentEffect.queueInputFrame(glObjectsProvider, outputTexture, presentationTimeUs);
-    frameQueue.add(new TimedTextureInfo<T>(outputTexture, presentationTimeUs, task));
+    frameQueue.add(
+        new QueuedFrame<T>(new TimedGlTextureInfo(outputTexture, presentationTimeUs), task));

     inputListener.onInputFrameProcessed(inputTexture);

@@ -297,25 +298,28 @@ import java.util.concurrent.TimeUnit;
    * <p>Returns {@code false} if no more frames are available for output.
    */
   private boolean outputOneFrame() {
-    TimedTextureInfo<T> timedTextureInfo = frameQueue.poll();
-    if (timedTextureInfo == null) {
+    QueuedFrame<T> queuedFrame = frameQueue.poll();
+    if (queuedFrame == null) {
       return false;
     }
     try {
       T result =
           Futures.getChecked(
-              timedTextureInfo.task,
+              queuedFrame.task,
               VideoFrameProcessingException.class,
               PROCESSING_TIMEOUT_MS,
               TimeUnit.MILLISECONDS);
       GlUtil.focusFramebufferUsingCurrentContext(
-          timedTextureInfo.textureInfo.fboId,
-          timedTextureInfo.textureInfo.width,
-          timedTextureInfo.textureInfo.height);
+          queuedFrame.timedGlTextureInfo.glTextureInfo.fboId,
+          queuedFrame.timedGlTextureInfo.glTextureInfo.width,
+          queuedFrame.timedGlTextureInfo.glTextureInfo.height);
       concurrentEffect.finishProcessingAndBlend(
-          timedTextureInfo.textureInfo, timedTextureInfo.presentationTimeUs, result);
+          queuedFrame.timedGlTextureInfo.glTextureInfo,
+          queuedFrame.timedGlTextureInfo.presentationTimeUs,
+          result);
       outputListener.onOutputFrameAvailable(
-          timedTextureInfo.textureInfo, timedTextureInfo.presentationTimeUs);
+          queuedFrame.timedGlTextureInfo.glTextureInfo,
+          queuedFrame.timedGlTextureInfo.presentationTimeUs);
       return true;
     } catch (GlUtil.GlException | VideoFrameProcessingException e) {
       onError(e);
@@ -324,9 +328,9 @@ import java.util.concurrent.TimeUnit;
   }

   private void cancelProcessingOfPendingFrames() {
-    TimedTextureInfo<T> timedTextureInfo;
-    while ((timedTextureInfo = frameQueue.poll()) != null) {
-      timedTextureInfo.task.cancel(/* mayInterruptIfRunning= */ false);
+    QueuedFrame<T> queuedFrame;
+    while ((queuedFrame = frameQueue.poll()) != null) {
+      queuedFrame.task.cancel(/* mayInterruptIfRunning= */ false);
     }
   }

@@ -335,14 +339,12 @@ import java.util.concurrent.TimeUnit;
         () -> errorListener.onError(VideoFrameProcessingException.from(e)));
   }

-  private static class TimedTextureInfo<T> {
-    final GlTextureInfo textureInfo;
-    final long presentationTimeUs;
-    final Future<T> task;
+  private static final class QueuedFrame<T> {
+    public final TimedGlTextureInfo timedGlTextureInfo;
+    public final Future<T> task;

-    TimedTextureInfo(GlTextureInfo textureInfo, long presentationTimeUs, Future<T> task) {
-      this.textureInfo = textureInfo;
-      this.presentationTimeUs = presentationTimeUs;
+    public QueuedFrame(TimedGlTextureInfo timedGlTextureInfo, Future<T> task) {
+      this.timedGlTextureInfo = timedGlTextureInfo;
       this.task = task;
     }
   }
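As an aside, the pattern QueuedFrame supports can be reduced to plain java.util.concurrent: each queued entry pairs the metadata needed at output time with the Future of the asynchronous per-frame work, and the consumer drains the queue in order, blocking (with a timeout) until each frame's work finishes. The following is an editor's sketch, not part of the diff; all names (PendingFrame, TIMEOUT_MS) are illustrative.

import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

final class PendingFrameQueueDemo {
  private static final long TIMEOUT_MS = 500;

  /** Pairs a frame's presentation time with the Future of its asynchronous processing. */
  private static final class PendingFrame<T> {
    final long presentationTimeUs;
    final Future<T> task;

    PendingFrame(long presentationTimeUs, Future<T> task) {
      this.presentationTimeUs = presentationTimeUs;
      this.task = task;
    }
  }

  public static void main(String[] args) throws Exception {
    ExecutorService worker = Executors.newSingleThreadExecutor();
    Queue<PendingFrame<String>> frameQueue = new ArrayDeque<>();

    // Producer side: kick off asynchronous work per frame and remember the Future.
    for (int i = 0; i < 3; i++) {
      long presentationTimeUs = i * 33_333L;
      frameQueue.add(
          new PendingFrame<>(
              presentationTimeUs, worker.submit(() -> "result@" + presentationTimeUs)));
    }

    // Consumer side: frames leave the queue in presentation order; get() blocks until
    // that frame's work completes, mirroring Futures.getChecked in outputOneFrame().
    PendingFrame<String> frame;
    while ((frame = frameQueue.poll()) != null) {
      System.out.println(
          frame.presentationTimeUs + " -> " + frame.task.get(TIMEOUT_MS, TimeUnit.MILLISECONDS));
    }
    worker.shutdown();
  }
}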
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.effect;
+
+import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Assertions.checkState;
+
+import android.content.Context;
+import androidx.media3.common.C;
+import androidx.media3.common.GlObjectsProvider;
+import androidx.media3.common.GlTextureInfo;
+import androidx.media3.common.VideoFrameProcessingException;
+
+/**
+ * A shader program that caches the input frames and {@linkplain #replayFrame replays} the oldest
+ * input frame when instructed.
+ */
+/* package */ final class ReplayableFrameCacheGlShaderProgram extends FrameCacheGlShaderProgram {
+  private static final int CAPACITY = 2;
+  private static final int REPLAY_FRAME_INDEX = 0;
+  private static final int REGULAR_FRAME_INDEX = 1;
+
+  // Use a manually managed array to be more efficient than List add/remove methods.
+  private final TimedGlTextureInfo[] cachedFrames;
+  private int cacheSize;
+
+  public ReplayableFrameCacheGlShaderProgram(Context context, boolean useHdr)
+      throws VideoFrameProcessingException {
+    super(context, CAPACITY, useHdr);
+    cachedFrames = new TimedGlTextureInfo[CAPACITY];
+  }
+
+  @Override
+  public void queueInputFrame(
+      GlObjectsProvider glObjectsProvider, GlTextureInfo inputTexture, long presentationTimeUs) {
+    checkState(cacheSize < CAPACITY);
+    super.queueInputFrame(glObjectsProvider, inputTexture, presentationTimeUs);
+    cachedFrames[cacheSize++] =
+        new TimedGlTextureInfo(
+            checkNotNull(outputTexturePool.getMostRecentlyUsedTexture()), presentationTimeUs);
+  }
+
+  @Override
+  public void releaseOutputFrame(GlTextureInfo outputTexture) {
+    // Do nothing here, as this method will be called as soon as the output frame is queued into
+    // the subsequent shader program. This class only releases output frames based on rendering
+    // events from the FinalShaderProgramWrapper. See onFrameRendered().
+  }
+
+  @Override
+  public void flush() {
+    cacheSize = 0;
+    super.flush();
+  }
+
+  /** Returns whether there is no cached frame. */
+  public boolean isEmpty() {
+    return cacheSize == 0;
+  }
+
+  /**
+   * Returns the presentation time of the frame that will be replayed, if {@link #replayFrame()} is
+   * called.
+   */
+  public long getReplayFramePresentationTimeUs() {
+    if (isEmpty()) {
+      return C.TIME_UNSET;
+    }
+    return cachedFrames[REPLAY_FRAME_INDEX].presentationTimeUs;
+  }
+
+  /**
+   * Replays the frame from cache, with the {@linkplain #getReplayFramePresentationTimeUs replay
+   * timestamp}.
+   */
+  public void replayFrame() {
+    if (isEmpty()) {
+      return;
+    }
+
+    // Get the oldest frame that is queued.
+    TimedGlTextureInfo oldestFrame = cachedFrames[REPLAY_FRAME_INDEX];
+    getOutputListener()
+        .onOutputFrameAvailable(oldestFrame.glTextureInfo, oldestFrame.presentationTimeUs);
+
+    // Queue the subsequent frame also to keep the player's output frame queue full.
+    if (cacheSize > 1) {
+      TimedGlTextureInfo secondOldestFrame = cachedFrames[REGULAR_FRAME_INDEX];
+      getOutputListener()
+          .onOutputFrameAvailable(
+              secondOldestFrame.glTextureInfo, secondOldestFrame.presentationTimeUs);
+    }
+  }
+
+  /** Removes a frame from the cache when a frame of the {@code presentationTimeUs} is rendered. */
+  public void onFrameRendered(long presentationTimeUs) {
+    // The cache needs to be full when capacity is two; only release frame n once frame n+1 has
+    // been rendered.
+    if (cacheSize < CAPACITY
+        || presentationTimeUs < cachedFrames[REGULAR_FRAME_INDEX].presentationTimeUs) {
+      return;
+    }
+
+    // Evict the oldest frame.
+    TimedGlTextureInfo cachedFrame = cachedFrames[REPLAY_FRAME_INDEX];
+    cachedFrames[REPLAY_FRAME_INDEX] = cachedFrames[REGULAR_FRAME_INDEX];
+    cacheSize--;
+
+    // Release the texture; this also calls readyToAcceptInput.
+    super.releaseOutputFrame(cachedFrame.glTextureInfo);
+  }
+}
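The replay/eviction bookkeeping above is easy to miss among the GL plumbing. The following editor's sketch (not part of the diff; names like TwoSlotReplayCacheDemo are illustrative) isolates the rules: slot 0 always holds the frame a replay would re-emit, and a frame is only evicted once a later frame has been rendered.

final class TwoSlotReplayCacheDemo {
  private final long[] cachedTimesUs = new long[2];
  private int cacheSize;

  /** Mirrors queueInputFrame(): at most two frames may be cached at once. */
  void queue(long presentationTimeUs) {
    if (cacheSize == 2) {
      throw new IllegalStateException("cache full");
    }
    cachedTimesUs[cacheSize++] = presentationTimeUs;
  }

  /** Mirrors replayFrame(): re-emits the oldest frame, then the newer one if present. */
  void replay() {
    for (int i = 0; i < cacheSize; i++) {
      System.out.println("re-emit frame @" + cachedTimesUs[i]);
    }
  }

  /** Mirrors onFrameRendered(): evicts frame n only once frame n+1 (or later) rendered. */
  void onRendered(long presentationTimeUs) {
    if (cacheSize < 2 || presentationTimeUs < cachedTimesUs[1]) {
      return;
    }
    cachedTimesUs[0] = cachedTimesUs[1];
    cacheSize--;
    System.out.println("evicted oldest; replay slot now @" + cachedTimesUs[0]);
  }

  public static void main(String[] args) {
    TwoSlotReplayCacheDemo cache = new TwoSlotReplayCacheDemo();
    cache.queue(0); // Frame 0 lands in the replay slot.
    cache.queue(33_333); // Frame 1 lands in the regular slot.
    cache.onRendered(0); // No eviction: frame 1 has not been rendered yet.
    cache.replay(); // Re-emits frame 0, then frame 1.
    cache.onRendered(33_333); // Frame 1 rendered, so frame 0 is evicted.
  }
}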
@@ -16,27 +16,34 @@

 package androidx.media3.effect;

 import static androidx.media3.common.util.Assertions.checkArgument;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;

 import android.content.Context;
+import android.graphics.Bitmap;
+import android.view.Surface;
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
 import androidx.media3.common.ColorInfo;
 import androidx.media3.common.DebugViewProvider;
+import androidx.media3.common.Effect;
+import androidx.media3.common.Format;
+import androidx.media3.common.OnInputFrameProcessedListener;
 import androidx.media3.common.SurfaceInfo;
 import androidx.media3.common.VideoCompositorSettings;
 import androidx.media3.common.VideoFrameProcessingException;
 import androidx.media3.common.VideoFrameProcessor;
 import androidx.media3.common.VideoGraph;
+import androidx.media3.common.util.TimestampIterator;
 import androidx.media3.common.util.UnstableApi;
+import com.google.common.collect.ImmutableList;
 import com.google.common.util.concurrent.MoreExecutors;
 import java.util.List;
 import java.util.concurrent.Executor;

 /** A {@link VideoGraph} that handles one input stream. */
 @UnstableApi
-public abstract class SingleInputVideoGraph implements VideoGraph {
+public class SingleInputVideoGraph implements VideoGraph {

   private final Context context;
   private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
@@ -44,8 +51,8 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
   private final Listener listener;
   private final DebugViewProvider debugViewProvider;
   private final Executor listenerExecutor;
+  private final List<Effect> compositionEffects;
   private final boolean renderFramesAutomatically;
-  private final long initialTimestampOffsetUs;

   @Nullable private VideoFrameProcessor videoFrameProcessor;
   @Nullable private SurfaceInfo outputSurfaceInfo;
@@ -53,6 +60,51 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
   private volatile boolean hasProducedFrameWithTimestampZero;
   private int inputIndex;

+  /** A {@link VideoGraph.Factory} for {@link SingleInputVideoGraph}. */
+  public static final class Factory implements VideoGraph.Factory {
+    private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
+
+    /**
+     * A {@code Factory} for {@link SingleInputVideoGraph} that uses a {@link
+     * DefaultVideoFrameProcessor.Factory}.
+     */
+    public Factory() {
+      this(new DefaultVideoFrameProcessor.Factory.Builder().build());
+    }
+
+    public Factory(VideoFrameProcessor.Factory videoFrameProcessorFactory) {
+      this.videoFrameProcessorFactory = videoFrameProcessorFactory;
+    }
+
+    @Override
+    public SingleInputVideoGraph create(
+        Context context,
+        ColorInfo outputColorInfo,
+        DebugViewProvider debugViewProvider,
+        Listener listener,
+        Executor listenerExecutor,
+        VideoCompositorSettings videoCompositorSettings,
+        List<Effect> compositionEffects,
+        long initialTimestampOffsetUs,
+        boolean renderFramesAutomatically) {
+      return new SingleInputVideoGraph(
+          context,
+          videoFrameProcessorFactory,
+          outputColorInfo,
+          listener,
+          compositionEffects,
+          debugViewProvider,
+          listenerExecutor,
+          videoCompositorSettings,
+          renderFramesAutomatically);
+    }
+
+    @Override
+    public boolean supportsMultipleInputs() {
+      return false;
+    }
+  }
+
   /**
    * Creates an instance.
    *
@@ -63,11 +115,11 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
       VideoFrameProcessor.Factory videoFrameProcessorFactory,
       ColorInfo outputColorInfo,
       Listener listener,
+      List<Effect> compositionEffects,
       DebugViewProvider debugViewProvider,
       Executor listenerExecutor,
       VideoCompositorSettings videoCompositorSettings,
-      boolean renderFramesAutomatically,
-      long initialTimestampOffsetUs) {
+      boolean renderFramesAutomatically) {
     checkState(
         VideoCompositorSettings.DEFAULT.equals(videoCompositorSettings),
         "SingleInputVideoGraph does not use VideoCompositor, and therefore cannot apply"
@@ -78,8 +130,8 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
     this.listener = listener;
     this.debugViewProvider = debugViewProvider;
     this.listenerExecutor = listenerExecutor;
+    this.compositionEffects = compositionEffects;
     this.renderFramesAutomatically = renderFramesAutomatically;
-    this.initialTimestampOffsetUs = initialTimestampOffsetUs;
     this.inputIndex = C.INDEX_UNSET;
   }

@@ -120,14 +172,17 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
   }

   @Override
-  public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
+  public void onOutputFrameAvailableForRendering(
+      long presentationTimeUs, boolean isRedrawnFrame) {
     // Frames are rendered automatically.
     if (presentationTimeUs == 0) {
       hasProducedFrameWithTimestampZero = true;
     }
     lastProcessedFramePresentationTimeUs = presentationTimeUs;
     listenerExecutor.execute(
-        () -> listener.onOutputFrameAvailableForRendering(presentationTimeUs));
+        () ->
+            listener.onOutputFrameAvailableForRendering(
+                presentationTimeUs, isRedrawnFrame));
   }

   @Override
@@ -146,12 +201,6 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
     }
   }

-  @Override
-  public VideoFrameProcessor getProcessor(int inputIndex) {
-    checkArgument(this.inputIndex != C.INDEX_UNSET && this.inputIndex == inputIndex);
-    return checkStateNotNull(videoFrameProcessor);
-  }
-
   @Override
   public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
     this.outputSurfaceInfo = outputSurfaceInfo;
@@ -165,6 +214,88 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
     return hasProducedFrameWithTimestampZero;
   }

+  @Override
+  public boolean queueInputBitmap(
+      int inputIndex, Bitmap inputBitmap, TimestampIterator timestampIterator) {
+    checkStateNotNull(videoFrameProcessor);
+    return videoFrameProcessor.queueInputBitmap(inputBitmap, timestampIterator);
+  }
+
+  @Override
+  public boolean queueInputTexture(int inputIndex, int textureId, long presentationTimeUs) {
+    checkStateNotNull(videoFrameProcessor);
+    return videoFrameProcessor.queueInputTexture(textureId, presentationTimeUs);
+  }
+
+  @Override
+  public void setOnInputFrameProcessedListener(
+      int inputIndex, OnInputFrameProcessedListener listener) {
+    checkStateNotNull(videoFrameProcessor);
+    videoFrameProcessor.setOnInputFrameProcessedListener(listener);
+  }
+
+  @Override
+  public void setOnInputSurfaceReadyListener(int inputIndex, Runnable listener) {
+    checkStateNotNull(videoFrameProcessor);
+    videoFrameProcessor.setOnInputSurfaceReadyListener(listener);
+  }
+
+  @Override
+  public Surface getInputSurface(int inputIndex) {
+    checkStateNotNull(videoFrameProcessor);
+    return videoFrameProcessor.getInputSurface();
+  }
+
+  @Override
+  public void registerInputStream(
+      int inputIndex,
+      @VideoFrameProcessor.InputType int inputType,
+      Format format,
+      List<Effect> effects,
+      long offsetToAddUs) {
+    checkStateNotNull(videoFrameProcessor);
+    videoFrameProcessor.registerInputStream(
+        inputType,
+        format,
+        new ImmutableList.Builder<Effect>().addAll(effects).addAll(compositionEffects).build(),
+        offsetToAddUs);
+  }
+
+  @Override
+  public boolean registerInputFrame(int inputIndex) {
+    checkStateNotNull(videoFrameProcessor);
+    return videoFrameProcessor.registerInputFrame();
+  }
+
+  @Override
+  public int getPendingInputFrameCount(int inputIndex) {
+    checkStateNotNull(videoFrameProcessor);
+    return videoFrameProcessor.getPendingInputFrameCount();
+  }
+
+  @Override
+  public void renderOutputFrame(long renderTimeNs) {
+    checkStateNotNull(videoFrameProcessor);
+    videoFrameProcessor.renderOutputFrame(renderTimeNs);
+  }
+
+  @Override
+  public void redraw() {
+    checkStateNotNull(videoFrameProcessor).redraw();
+  }
+
+  @Override
+  public void flush() {
+    checkStateNotNull(videoFrameProcessor);
+    videoFrameProcessor.flush();
+  }
+
+  @Override
+  public void signalEndOfInput(int inputIndex) {
+    checkStateNotNull(videoFrameProcessor);
+    videoFrameProcessor.signalEndOfInput();
+  }
+
   @Override
   public void release() {
     if (released) {
@@ -177,12 +308,4 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
     }
     released = true;
   }
-
-  protected int getInputIndex() {
-    return inputIndex;
-  }
-
-  protected long getInitialTimestampOffsetUs() {
-    return initialTimestampOffsetUs;
-  }
 }
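The shape of this change: SingleInputVideoGraph stops being abstract and satisfies the per-input VideoGraph surface by forwarding every call to its single processor after checking that the graph has been initialized. A GL-free editor's sketch of that forwarding idea follows (not from the diff; the interface and names are illustrative stand-ins, not media3 API):

import java.util.Objects;

final class SingleDelegateForwardingDemo {
  /** Stand-in for the single underlying processor. */
  interface FrameSink {
    boolean queueFrame(long presentationTimeUs);
  }

  static final class SingleInputForwarder {
    private FrameSink sink; // Created lazily, like videoFrameProcessor in initialize().

    void initialize() {
      sink =
          presentationTimeUs -> {
            System.out.println("queued @" + presentationTimeUs);
            return true;
          };
    }

    // Multi-input style entry point: the index is accepted for interface
    // compatibility, but every call lands on the one delegate.
    boolean queueFrame(int inputIndex, long presentationTimeUs) {
      Objects.requireNonNull(sink, "initialize() must be called first");
      return sink.queueFrame(presentationTimeUs);
    }
  }

  public static void main(String[] args) {
    SingleInputForwarder graph = new SingleInputForwarder();
    graph.initialize();
    graph.queueFrame(/* inputIndex= */ 0, /* presentationTimeUs= */ 0);
  }
}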
@@ -17,18 +17,19 @@ package androidx.media3.effect;

 import static androidx.media3.common.util.Assertions.checkState;

+import androidx.annotation.Nullable;
 import androidx.media3.common.GlObjectsProvider;
 import androidx.media3.common.GlTextureInfo;
 import androidx.media3.common.util.GlUtil;
 import com.google.common.collect.Iterables;
 import java.util.ArrayDeque;
+import java.util.Deque;
 import java.util.Iterator;
-import java.util.Queue;

 /** Holds {@code capacity} textures, to re-use textures. */
 /* package */ final class TexturePool {
-  private final Queue<GlTextureInfo> freeTextures;
-  private final Queue<GlTextureInfo> inUseTextures;
+  private final Deque<GlTextureInfo> freeTextures;
+  private final Deque<GlTextureInfo> inUseTextures;
   private final int capacity;
   private final boolean useHighPrecisionColorComponents;

@@ -94,6 +95,15 @@ import java.util.Queue;
     return texture;
   }

+  /** Returns the {@link GlTextureInfo} that is most recently {@linkplain #useTexture used}. */
+  @Nullable
+  public GlTextureInfo getMostRecentlyUsedTexture() {
+    if (inUseTextures.isEmpty()) {
+      return null;
+    }
+    return inUseTextures.getLast();
+  }
+
   /**
    * Frees the texture represented by {@code textureInfo}.
    *
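The Queue-to-Deque switch exists because a FIFO queue only exposes its head, while getMostRecentlyUsedTexture() needs the element most recently moved into the in-use collection, i.e. its tail, which Deque#getLast provides. An editor's sketch of that access pattern (not from the diff; Integer ids stand in for GlTextureInfo):

import java.util.ArrayDeque;
import java.util.Deque;

final class DequePoolDemo {
  public static void main(String[] args) {
    Deque<Integer> freeTextures = new ArrayDeque<>();
    Deque<Integer> inUseTextures = new ArrayDeque<>();
    for (int id = 1; id <= 3; id++) {
      freeTextures.add(id);
    }

    // Like useTexture(): move the oldest free texture to the in-use collection.
    inUseTextures.add(freeTextures.remove());
    inUseTextures.add(freeTextures.remove());

    // Like getMostRecentlyUsedTexture(): read the tail of the in-use deque.
    System.out.println(inUseTextures.getLast()); // Prints 2, the texture used last.

    // Like freeTexture(): releasing returns the oldest in-use texture (the head) first.
    freeTextures.add(inUseTextures.remove());
  }
}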
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2025 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.effect;
+
+import androidx.media3.common.GlTextureInfo;
+
+/* package */ final class TimedGlTextureInfo {
+
+  /** The {@link GlTextureInfo}. */
+  public final GlTextureInfo glTextureInfo;
+
+  /** The designated presentation time of the texture, in microseconds. */
+  public final long presentationTimeUs;
+
+  /** Creates a new instance. */
+  public TimedGlTextureInfo(GlTextureInfo glTextureInfo, long presentationTimeUs) {
+    this.glTextureInfo = glTextureInfo;
+    this.presentationTimeUs = presentationTimeUs;
+  }
+}
@@ -92,4 +92,60 @@ public final class PresentationTest {
     assertThat(outputSize.getWidth()).isEqualTo(requestedWidth);
     assertThat(outputSize.getHeight()).isEqualTo(requestedHeight);
   }
+
+  @Test
+  public void configure_createForShortSideWithPortraitInput_changesDimension() {
+    int inputWidth = 720;
+    int inputHeight = 1280;
+    Presentation presentation = Presentation.createForShortSide(1080);
+
+    Size outputSize = presentation.configure(inputWidth, inputHeight);
+    boolean isNoOp = presentation.isNoOp(inputWidth, inputHeight);
+
+    assertThat(isNoOp).isFalse();
+    assertThat(outputSize.getWidth()).isEqualTo(1080);
+    assertThat(outputSize.getHeight()).isEqualTo(1920);
+  }
+
+  @Test
+  public void configure_createForShortSideWithPortraitInputNoEdit_leavesFramesUnchanged() {
+    int inputWidth = 720;
+    int inputHeight = 1280;
+    Presentation presentation = Presentation.createForShortSide(inputWidth);
+
+    Size outputSize = presentation.configure(inputWidth, inputHeight);
+    boolean isNoOp = presentation.isNoOp(inputWidth, inputHeight);
+
+    assertThat(isNoOp).isTrue();
+    assertThat(outputSize.getWidth()).isEqualTo(inputWidth);
+    assertThat(outputSize.getHeight()).isEqualTo(inputHeight);
+  }
+
+  @Test
+  public void configure_createForShortSideWithLandscapeInput_changesDimension() {
+    int inputWidth = 1280;
+    int inputHeight = 720;
+    Presentation presentation = Presentation.createForShortSide(1080);
+
+    Size outputSize = presentation.configure(inputWidth, inputHeight);
+    boolean isNoOp = presentation.isNoOp(inputWidth, inputHeight);
+
+    assertThat(isNoOp).isFalse();
+    assertThat(outputSize.getWidth()).isEqualTo(1920);
+    assertThat(outputSize.getHeight()).isEqualTo(1080);
+  }
+
+  @Test
+  public void configure_createForShortSideWithLandscapeInputNoEdit_leavesFramesUnchanged() {
+    int inputWidth = 1280;
+    int inputHeight = 720;
+    Presentation presentation = Presentation.createForShortSide(720);
+
+    Size outputSize = presentation.configure(inputWidth, inputHeight);
+    boolean isNoOp = presentation.isNoOp(inputWidth, inputHeight);
+
+    assertThat(isNoOp).isTrue();
+    assertThat(outputSize.getWidth()).isEqualTo(1280);
+    assertThat(outputSize.getHeight()).isEqualTo(720);
+  }
 }
Some files were not shown because too many files have changed in this diff.