mirror of
https://github.com/androidx/media.git
synced 2025-04-30 06:46:50 +08:00
Use the onPresentationEnded callback to detect end of offloaded audio
In offloaded audio playback, the `DefaultAudioSink` should use the `AudioTrack.StreamEventCallback` method `onPresentationEnded` to note whether the `AudioTrack` has completed playing all pending data.

PiperOrigin-RevId: 622885399
This commit is contained in:
parent
fad3257072
commit
5e85823ea0
@@ -154,6 +154,8 @@ This release includes the following changes since the
 * Audio:
   * Allow renderer recovery by disabling offload if audio track fails to
     initialize in offload mode.
+  * For offloaded playback, use the `AudioTrack.StreamEventCallback` method
+    `onPresentationEnded` to identify when all pending data has been played.
 * Video:
   * Add workaround for a device issue on Galaxy Tab S7 FE, Chromecast with
     Google TV, and Lenovo M10 FHD Plus that causes 60fps H265 streams to be
|
@@ -543,6 +543,7 @@ public final class DefaultAudioSink implements AudioSink {
   private int preV21OutputBufferOffset;
   private boolean handledEndOfStream;
   private boolean stoppedAudioTrack;
+  private boolean handledOffloadOnPresentationEnded;

   private boolean playing;
   private boolean externalAudioSessionIdProvided;
@@ -1298,6 +1299,9 @@ public final class DefaultAudioSink implements AudioSink {
   @Override
   public boolean hasPendingData() {
     return isAudioTrackInitialized()
+        && (Util.SDK_INT < 29
+            || !audioTrack.isOffloadedPlayback()
+            || !handledOffloadOnPresentationEnded)
         && audioTrackPositionTracker.hasPendingData(getWrittenFrames());
   }
@@ -1553,6 +1557,7 @@ public final class DefaultAudioSink implements AudioSink {
     outputBuffer = null;
     stoppedAudioTrack = false;
     handledEndOfStream = false;
+    handledOffloadOnPresentationEnded = false;
     avSyncHeader = null;
     bytesUntilNextAvSync = 0;
     trimmingAudioProcessor.resetTrimmedFrameCount();
@@ -1966,6 +1971,15 @@ public final class DefaultAudioSink implements AudioSink {
       }
     }

+    @Override
+    public void onPresentationEnded(AudioTrack track) {
+      if (!track.equals(audioTrack)) {
+        // Stale event.
+        return;
+      }
+      handledOffloadOnPresentationEnded = true;
+    }
+
     @Override
     public void onTearDown(AudioTrack track) {
       if (!track.equals(audioTrack)) {
|
Loading…
x
Reference in New Issue
Block a user