Plumb audio through AudioGraph for Composition preview.

The dump file diff in this change is due to using AudioGraph: the 2nd item is
automatically edited to match the AudioFormat of the 1st item (44.1 kHz mono)
rather than keeping its own (48 kHz stereo). Manually verified the data output
for the 2nd item:

* Before: 66936 bytes (16734 frames) output = 348_625us of audio.
* After: 30750 bytes (15375 frames) output = 348_639us of audio.

The small final buffer is caused by SonicAudioProcessor outputting all pending
data when end of stream (EOS) is queued, and is working as intended.

PiperOrigin-RevId: 580494578
parent 8d1663a57f
commit 76474684f9
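The byte, frame, and duration figures in the commit message are consistent with 16-bit PCM: 4 bytes per frame for 48 kHz stereo before, 2 bytes per frame for 44.1 kHz mono after. A quick sanity check using only the numbers quoted above (the wrapper class is illustrative, not part of the change):

public final class DurationCheck {
  public static void main(String[] args) {
    // Before: 48 kHz stereo, 16-bit PCM -> 4 bytes per frame.
    long beforeFrames = 66_936 / 4;                      // 16_734 frames
    long beforeUs = beforeFrames * 1_000_000L / 48_000;  // 348_625 us
    // After: 44.1 kHz mono, 16-bit PCM -> 2 bytes per frame.
    long afterFrames = 30_750 / 2;                       // 15_375 frames
    long afterUs = afterFrames * 1_000_000L / 44_100;    // 348_639 us
    System.out.println(beforeUs + "us before, " + afterUs + "us after");
  }
}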
@@ -89,6 +89,7 @@ import androidx.media3.common.ParserException;
 import androidx.media3.common.PlaybackException;
 import androidx.media3.common.Player;
 import androidx.media3.common.Player.Commands;
+import androidx.media3.common.audio.AudioProcessor;
 import com.google.common.base.Ascii;
 import com.google.common.base.Charsets;
 import com.google.common.math.DoubleMath;
@@ -2132,6 +2133,12 @@ public final class Util {
         .build();
   }
 
+  /** Gets a PCM {@link Format} based on the {@link AudioProcessor.AudioFormat}. */
+  @UnstableApi
+  public static Format getPcmFormat(AudioProcessor.AudioFormat audioFormat) {
+    return getPcmFormat(audioFormat.encoding, audioFormat.channelCount, audioFormat.sampleRate);
+  }
+
   /**
    * Converts a sample bit depth to a corresponding PCM encoding constant.
    *
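The added overload simply forwards to the existing getPcmFormat(encoding, channelCount, sampleRate). A minimal usage sketch; the class name and the 44.1 kHz mono values are illustrative only, and since the method is @UnstableApi, callers outside the library would also need to opt in:

import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.audio.AudioProcessor;
import androidx.media3.common.util.Util;

final class PcmFormatExample {
  // Builds a media3 Format describing the PCM output of an audio pipeline.
  static Format pcmOutputFormat() {
    AudioProcessor.AudioFormat audioFormat =
        new AudioProcessor.AudioFormat(
            /* sampleRate= */ 44_100, /* channelCount= */ 1, /* encoding= */ C.ENCODING_PCM_16BIT);
    return Util.getPcmFormat(audioFormat);
  }
}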
@@ -0,0 +1,36 @@
+AudioSink:
+  buffer count = 10
+  config:
+    pcmEncoding = 2
+    channelCount = 1
+    sampleRate = 44100
+  buffer #0:
+    time = 1000000000000
+    data = -85819864
+  buffer #1:
+    time = 1000000100000
+    data = 566487491
+  buffer #2:
+    time = 1000000200000
+    data = -1256531710
+  buffer #3:
+    time = 1000000300000
+    data = 793455796
+  buffer #4:
+    time = 1000000400000
+    data = -268235582
+  buffer #5:
+    time = 1000000500000
+    data = -8136122
+  buffer #6:
+    time = 1000000600000
+    data = 1750866613
+  buffer #7:
+    time = 1000000700000
+    data = -1100753636
+  buffer #8:
+    time = 1000000800000
+    data = 507833230
+  buffer #9:
+    time = 1000000900000
+    data = 1472467506
@@ -1,5 +1,5 @@
 AudioSink:
-  buffer count = 14
+  buffer count = 15
   config:
     pcmEncoding = 2
     channelCount = 1
@@ -34,20 +34,18 @@ AudioSink:
   buffer #9:
     time = 1000000900000
     data = 1472467506
-  discontinuity:
-  config:
-    pcmEncoding = 2
-    channelCount = 2
-    sampleRate = 48000
   buffer #10:
     time = 1000001000000
-    data = -278103001
+    data = 1785344804
   buffer #11:
     time = 1000001100000
-    data = 1522105084
+    data = 458152960
   buffer #12:
     time = 1000001200000
-    data = 932319027
+    data = -2129352270
   buffer #13:
     time = 1000001300000
-    data = 325000240
+    data = 1572219123
+  buffer #14:
+    time = 1000001348616
+    data = -2263
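Buffer #14 above is the short trailing buffer mentioned in the commit message: SonicAudioProcessor emits all of its pending data once end of stream is queued. A rough sketch of that drain pattern against the AudioProcessor interface; the helper method and the way output is consumed are illustrative, not code from this change:

import androidx.media3.common.audio.SonicAudioProcessor;
import java.nio.ByteBuffer;

final class DrainExample {
  // Illustrative only: queueing end of stream makes Sonic flush whatever it has
  // buffered, which shows up as a short final buffer in the sink dump.
  static void drainAtEndOfStream(SonicAudioProcessor processor) {
    processor.queueEndOfStream();
    while (!processor.isEnded()) {
      ByteBuffer output = processor.getOutput();
      // Write `output` to the AudioSink here; it may be shorter than the regular
      // buffers because it only carries the remaining pending samples.
    }
  }
}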
@@ -43,6 +43,20 @@ import java.nio.ByteBuffer;
     outputAudioFormat = AudioFormat.NOT_SET;
   }
 
+  /** Returns whether an {@link AudioFormat} is valid as an input format. */
+  public static boolean isInputAudioFormatValid(AudioFormat format) {
+    if (format.encoding == Format.NO_VALUE) {
+      return false;
+    }
+    if (format.sampleRate == Format.NO_VALUE) {
+      return false;
+    }
+    if (format.channelCount == Format.NO_VALUE) {
+      return false;
+    }
+    return true;
+  }
+
   /** Returns a new {@link AudioGraphInput} instance. */
   public AudioGraphInput registerInput(EditedMediaItem item, Format format) throws ExportException {
     checkArgument(format.pcmEncoding != Format.NO_VALUE);
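With the check now exposed as a public static method on AudioGraph, other transformer components can validate a decoded format before registering it. A hedged sketch; the helper class is assumed to live in the same package as AudioGraph, and the validation-then-register flow is only an illustration, not how this change wires it up:

package androidx.media3.transformer;

import androidx.media3.common.Format;
import androidx.media3.common.audio.AudioProcessor;

final class RegisterInputExample {
  // Illustrative only: check that the PCM description is fully specified before
  // handing the item to the graph.
  static AudioGraphInput register(AudioGraph audioGraph, EditedMediaItem item, Format format)
      throws ExportException {
    AudioProcessor.AudioFormat audioFormat =
        new AudioProcessor.AudioFormat(format.sampleRate, format.channelCount, format.pcmEncoding);
    if (!AudioGraph.isInputAudioFormatValid(audioFormat)) {
      throw new IllegalArgumentException("Audio format is not fully specified: " + audioFormat);
    }
    return audioGraph.registerInput(item, format);
  }
}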
@@ -22,6 +22,7 @@ import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;
 import static androidx.media3.decoder.DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT;
+import static androidx.media3.transformer.AudioGraph.isInputAudioFormatValid;
 
 import androidx.annotation.Nullable;
 import androidx.media3.common.C;
@@ -367,22 +368,6 @@ import java.util.concurrent.atomic.AtomicReference;
     return audioProcessingPipeline;
   }
 
-  private static boolean isInputAudioFormatValid(AudioFormat format) {
-    if (format.encoding == Format.NO_VALUE) {
-      return false;
-    }
-    if (format.sampleRate == Format.NO_VALUE) {
-      return false;
-    }
-    if (format.channelCount == Format.NO_VALUE) {
-      return false;
-    }
-    if (format.bytesPerFrame == Format.NO_VALUE) {
-      return false;
-    }
-    return true;
-  }
-
   private static final class MediaItemChange {
     public final EditedMediaItem editedMediaItem;
     public final long durationUs;