Mirror of https://github.com/androidx/media.git (synced 2025-05-14 19:19:58 +08:00)
Rename the SamplePipelines

The old names are not really correct anymore because:
- The Audio/VideoTranscodingSamplePipelines do not decode anymore.
- The pipelines now mux the encoded data.

PiperOrigin-RevId: 499498446
This commit is contained in:
commit cfc61655ce (parent 37f8a0bb6b)
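For orientation before the hunks: the commit is a pure rename of three pipeline classes, whose Javadoc (unchanged in the diff) already describes their responsibilities. The skeleton below is only an illustrative summary of the new naming scheme, with bodies elided and the base class sketched; it is not the actual Media3 source.

// Illustrative summary of the renamed hierarchy; bodies elided, base class sketched.
/* package */ abstract class SamplePipeline {
  // Common idea after this change: each pipeline feeds its output samples to the muxer itself.
}

// Was AudioTranscodingSamplePipeline: processes, re-encodes and muxes raw audio samples.
/* package */ final class AudioSamplePipeline extends SamplePipeline {}

// Was VideoTranscodingSamplePipeline: processes, re-encodes and muxes raw video frames.
/* package */ final class VideoSamplePipeline extends SamplePipeline {}

// Was PassthroughSamplePipeline: muxes encoded samples without any transcoding or transformation.
/* package */ final class EncodedSamplePipeline extends SamplePipeline {}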
AudioTranscodingSamplePipeline.java → AudioSamplePipeline.java

@@ -36,7 +36,7 @@ import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
 import org.checkerframework.dataflow.qual.Pure;
 
 /** Pipeline to process, re-encode and mux raw audio samples. */
-/* package */ final class AudioTranscodingSamplePipeline extends SamplePipeline {
+/* package */ final class AudioSamplePipeline extends SamplePipeline {
 
   private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;
 
@@ -53,7 +53,7 @@ import org.checkerframework.dataflow.qual.Pure;
   private long encoderBufferDurationRemainder;
 
   // TODO(b/260618558): Move silent audio generation upstream of this component.
-  public AudioTranscodingSamplePipeline(
+  public AudioSamplePipeline(
       Format inputFormat,
       long streamStartPositionUs,
       long streamOffsetUs,
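The first hunk keeps DEFAULT_ENCODER_BITRATE = 128 * 1024 as the audio encoder's default bitrate. As a hedged illustration of where such a default typically ends up, the snippet below configures a platform AAC encoder through plain Android MediaCodec/MediaFormat; this is standard framework usage, not the Transformer's own encoder factory path.

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import java.io.IOException;

final class AudioEncoderConfigExample {
  // Matches the default kept by the hunk above.
  private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;

  /** Creates and configures a platform AAC encoder with the default bitrate. */
  static MediaCodec createAacEncoder(int sampleRate, int channelCount) throws IOException {
    MediaFormat format =
        MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, sampleRate, channelCount);
    format.setInteger(MediaFormat.KEY_BIT_RATE, DEFAULT_ENCODER_BITRATE);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    MediaCodec encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
    encoder.configure(
        format, /* surface= */ null, /* crypto= */ null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    return encoder;
  }
}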
PassthroughSamplePipeline.java → EncodedSamplePipeline.java

@@ -21,14 +21,14 @@ import androidx.media3.common.Format;
 import androidx.media3.decoder.DecoderInputBuffer;
 
 /** Pipeline that muxes encoded samples without any transcoding or transformation. */
-/* package */ final class PassthroughSamplePipeline extends SamplePipeline {
+/* package */ final class EncodedSamplePipeline extends SamplePipeline {
 
   private final DecoderInputBuffer buffer;
   private final Format format;
 
   private boolean hasPendingBuffer;
 
-  public PassthroughSamplePipeline(
+  public EncodedSamplePipeline(
       Format format,
       long streamStartPositionUs,
       TransformationRequest transformationRequest,
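EncodedSamplePipeline keeps samples in their encoded form and hands them straight to the muxer. The loop below is a hedged sketch of that passthrough idea using simplified, hypothetical SampleSource/MuxerSink types; the real pipeline works buffer-by-buffer through DecoderInputBuffer and the Transformer's muxer wrapper.

import java.nio.ByteBuffer;

/** Hypothetical source of encoded samples, for illustration only; not part of Media3. */
interface SampleSource {
  /** Returns the next encoded sample, or null when the stream has ended. */
  ByteBuffer readEncodedSample();

  /** Returns the presentation time of the sample returned by the last read. */
  long lastSamplePresentationTimeUs();
}

/** Hypothetical muxer sink, for illustration only; not part of Media3. */
interface MuxerSink {
  void writeSample(ByteBuffer encodedData, long presentationTimeUs);

  void endTrack();
}

final class EncodedPassthroughExample {
  /** Copies encoded samples to the muxer without decoding or re-encoding them. */
  static void muxWithoutTranscoding(SampleSource source, MuxerSink muxer) {
    ByteBuffer sample;
    while ((sample = source.readEncodedSample()) != null) {
      muxer.writeSample(sample, source.lastSamplePresentationTimeUs());
    }
    muxer.endTrack();
  }
}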
Pipeline selection (call sites of the renamed constructors):

@@ -505,7 +505,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
             (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_ENCODED) == 0;
     boolean shouldUseTranscodingPipeline = shouldTranscode || assetLoaderAlwaysDecodes;
     if (isAudio && shouldUseTranscodingPipeline) {
-      return new AudioTranscodingSamplePipeline(
+      return new AudioSamplePipeline(
          inputFormat,
          streamStartPositionUs,
          streamOffsetUs,
@@ -516,7 +516,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
          muxerWrapper,
          fallbackListener);
     } else if (shouldUseTranscodingPipeline) {
-      return new VideoTranscodingSamplePipeline(
+      return new VideoSamplePipeline(
          context,
          inputFormat,
          streamStartPositionUs,
@@ -530,7 +530,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
          fallbackListener,
          debugViewProvider);
     } else {
-      return new PassthroughSamplePipeline(
+      return new EncodedSamplePipeline(
          inputFormat,
          streamStartPositionUs,
          transformationRequest,
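These are the call sites that pick a pipeline: transcode when the request requires it or when the asset loader can only output decoded data, otherwise pass the encoded samples straight through. The standalone sketch below restates that branch with a stand-in enum in place of the real constructors and their arguments.

/** Condensed restatement of the selection branch above; PipelineKind stands in for the real constructors. */
final class PipelineSelectionExample {
  enum PipelineKind { AUDIO, VIDEO, ENCODED_PASSTHROUGH }

  static PipelineKind choosePipeline(
      boolean isAudio, boolean shouldTranscode, boolean assetLoaderAlwaysDecodes) {
    boolean shouldUseTranscodingPipeline = shouldTranscode || assetLoaderAlwaysDecodes;
    if (isAudio && shouldUseTranscodingPipeline) {
      return PipelineKind.AUDIO; // -> AudioSamplePipeline: re-encode and mux raw audio.
    } else if (shouldUseTranscodingPipeline) {
      return PipelineKind.VIDEO; // -> VideoSamplePipeline: re-encode and mux raw video.
    } else {
      return PipelineKind.ENCODED_PASSTHROUGH; // -> EncodedSamplePipeline: mux encoded samples as-is.
    }
  }
}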
VideoTranscodingSamplePipeline.java → VideoSamplePipeline.java

@@ -54,7 +54,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 import org.checkerframework.dataflow.qual.Pure;
 
 /** Pipeline to process, re-encode and mux raw video frames. */
-/* package */ final class VideoTranscodingSamplePipeline extends SamplePipeline {
+/* package */ final class VideoSamplePipeline extends SamplePipeline {
 
   private final FrameProcessor frameProcessor;
   private final ColorInfo frameProcessorInputColor;
@@ -68,7 +68,7 @@ import org.checkerframework.dataflow.qual.Pure;
    */
   private volatile long finalFramePresentationTimeUs;
 
-  public VideoTranscodingSamplePipeline(
+  public VideoSamplePipeline(
       Context context,
       Format inputFormat,
       long streamStartPositionUs,
@@ -208,7 +208,7 @@ import org.checkerframework.dataflow.qual.Pure;
 
           @Override
           public void onFrameProcessingEnded() {
-            VideoTranscodingSamplePipeline.this.finalFramePresentationTimeUs =
+            VideoSamplePipeline.this.finalFramePresentationTimeUs =
                 lastProcessedFramePresentationTimeUs;
             try {
               encoderWrapper.signalEndOfInputStream();
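The @ -208 hunk shows the end-of-stream handoff in the video pipeline: when frame processing finishes, the pipeline records the last frame's presentation time and tells its encoder wrapper to stop accepting input. The sketch below restates that handoff with simplified, hypothetical FrameProcessingListener/EncoderWrapperLike stand-ins; it is not the Media3 FrameProcessor.Listener API.

/** Hypothetical stand-in for the frame-processing callback, for illustration only. */
interface FrameProcessingListener {
  void onFrameProcessingEnded(long lastProcessedFramePresentationTimeUs);
}

/** Hypothetical stand-in for the pipeline's encoder wrapper, for illustration only. */
interface EncoderWrapperLike {
  void signalEndOfInputStream();
}

final class VideoEndOfStreamExample {
  private volatile long finalFramePresentationTimeUs;
  private final EncoderWrapperLike encoderWrapper;

  VideoEndOfStreamExample(EncoderWrapperLike encoderWrapper) {
    this.encoderWrapper = encoderWrapper;
  }

  /** Returns a listener that records the final frame time and ends the encoder's input. */
  FrameProcessingListener listener() {
    return lastProcessedFramePresentationTimeUs -> {
      // Remember where the stream ends, then tell the encoder no more frames are coming.
      finalFramePresentationTimeUs = lastProcessedFramePresentationTimeUs;
      encoderWrapper.signalEndOfInputStream();
    };
  }
}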
VideoEncoderWrapperTest.java

@@ -40,7 +40,7 @@ import org.robolectric.shadows.MediaCodecInfoBuilder;
 import org.robolectric.shadows.ShadowMediaCodec;
 import org.robolectric.shadows.ShadowMediaCodecList;
 
-/** Unit tests for {@link VideoTranscodingSamplePipeline.EncoderWrapper}. */
+/** Unit tests for {@link VideoSamplePipeline.EncoderWrapper}. */
 @RunWith(AndroidJUnit4.class)
 public final class VideoEncoderWrapperTest {
   private final TransformationRequest emptyTransformationRequest =
@@ -52,8 +52,8 @@ public final class VideoEncoderWrapperTest {
           new ListenerSet<>(Looper.myLooper(), Clock.DEFAULT, (listener, flags) -> {}),
           Clock.DEFAULT.createHandler(Looper.myLooper(), /* callback= */ null),
           emptyTransformationRequest);
-  private final VideoTranscodingSamplePipeline.EncoderWrapper encoderWrapper =
-      new VideoTranscodingSamplePipeline.EncoderWrapper(
+  private final VideoSamplePipeline.EncoderWrapper encoderWrapper =
+      new VideoSamplePipeline.EncoderWrapper(
          fakeEncoderFactory,
          /* inputFormat= */ new Format.Builder().setSampleMimeType(MimeTypes.VIDEO_H264).build(),
          /* muxerSupportedMimeTypes= */ ImmutableList.of(MimeTypes.VIDEO_H264),