Mirror of https://github.com/androidx/media.git (synced 2025-05-15 03:29:53 +08:00)
Rename the SamplePipelines
The old names are not really correct anymore because:
- The Audio/VideoTranscodingSamplePipelines do not decode anymore.
- The pipelines now mux the encoded data.

PiperOrigin-RevId: 499498446
parent 37f8a0bb6b
commit cfc61655ce
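Before the diff itself, here is a minimal, hypothetical Java sketch of what the new names describe: how the three renamed pipelines relate to the SamplePipeline base class and how a caller chooses between them. Only the class names and the isAudio/shouldUseTranscodingPipeline branching are taken from this commit; the wrapper class SamplePipelineSketch, the describe() method, and the createSamplePipeline() helper are invented for illustration and are not the media3 implementation.

// Hypothetical sketch only; everything except the three pipeline class names
// (AudioSamplePipeline, VideoSamplePipeline, EncodedSamplePipeline) is invented here.
final class SamplePipelineSketch {

  // Every pipeline now ends at the muxer; only some of them re-encode.
  abstract static class SamplePipeline {
    abstract String describe();
  }

  // Processes and re-encodes raw audio, then muxes it (was AudioTranscodingSamplePipeline).
  static final class AudioSamplePipeline extends SamplePipeline {
    @Override
    String describe() {
      return "process raw audio, re-encode, mux";
    }
  }

  // Processes and re-encodes raw video frames, then muxes them (was VideoTranscodingSamplePipeline).
  static final class VideoSamplePipeline extends SamplePipeline {
    @Override
    String describe() {
      return "process raw video frames, re-encode, mux";
    }
  }

  // Muxes already-encoded samples without transcoding (was PassthroughSamplePipeline).
  static final class EncodedSamplePipeline extends SamplePipeline {
    @Override
    String describe() {
      return "mux encoded samples as-is";
    }
  }

  // Mirrors the shouldUseTranscodingPipeline branching visible in the diff below.
  static SamplePipeline createSamplePipeline(
      boolean isAudio, boolean shouldTranscode, boolean assetLoaderAlwaysDecodes) {
    boolean shouldUseTranscodingPipeline = shouldTranscode || assetLoaderAlwaysDecodes;
    if (isAudio && shouldUseTranscodingPipeline) {
      return new AudioSamplePipeline();
    } else if (shouldUseTranscodingPipeline) {
      return new VideoSamplePipeline();
    }
    return new EncodedSamplePipeline();
  }

  public static void main(String[] args) {
    // An already-encoded track that needs no transcoding goes straight to the muxer.
    SamplePipeline pipeline =
        createSamplePipeline(
            /* isAudio= */ false, /* shouldTranscode= */ false, /* assetLoaderAlwaysDecodes= */ false);
    System.out.println(pipeline.describe()); // prints "mux encoded samples as-is"
  }
}

The rename's rationale is visible in the sketch: every pipeline ends at the muxer, and the transcoding pipelines no longer decode, so "Transcoding" and "Passthrough" no longer describe the real distinction between re-encoding raw samples and muxing already-encoded data.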
@@ -36,7 +36,7 @@ import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
 import org.checkerframework.dataflow.qual.Pure;
 
 /** Pipeline to process, re-encode and mux raw audio samples. */
-/* package */ final class AudioTranscodingSamplePipeline extends SamplePipeline {
+/* package */ final class AudioSamplePipeline extends SamplePipeline {
 
   private static final int DEFAULT_ENCODER_BITRATE = 128 * 1024;
 
@@ -53,7 +53,7 @@ import org.checkerframework.dataflow.qual.Pure;
   private long encoderBufferDurationRemainder;
 
   // TODO(b/260618558): Move silent audio generation upstream of this component.
-  public AudioTranscodingSamplePipeline(
+  public AudioSamplePipeline(
       Format inputFormat,
       long streamStartPositionUs,
       long streamOffsetUs,

@@ -21,14 +21,14 @@ import androidx.media3.common.Format;
 import androidx.media3.decoder.DecoderInputBuffer;
 
 /** Pipeline that muxes encoded samples without any transcoding or transformation. */
-/* package */ final class PassthroughSamplePipeline extends SamplePipeline {
+/* package */ final class EncodedSamplePipeline extends SamplePipeline {
 
   private final DecoderInputBuffer buffer;
   private final Format format;
 
   private boolean hasPendingBuffer;
 
-  public PassthroughSamplePipeline(
+  public EncodedSamplePipeline(
       Format format,
       long streamStartPositionUs,
       TransformationRequest transformationRequest,

@@ -505,7 +505,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
             (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_ENCODED) == 0;
     boolean shouldUseTranscodingPipeline = shouldTranscode || assetLoaderAlwaysDecodes;
     if (isAudio && shouldUseTranscodingPipeline) {
-      return new AudioTranscodingSamplePipeline(
+      return new AudioSamplePipeline(
          inputFormat,
          streamStartPositionUs,
          streamOffsetUs,
@@ -516,7 +516,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
          muxerWrapper,
          fallbackListener);
     } else if (shouldUseTranscodingPipeline) {
-      return new VideoTranscodingSamplePipeline(
+      return new VideoSamplePipeline(
          context,
          inputFormat,
          streamStartPositionUs,
@@ -530,7 +530,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
          fallbackListener,
          debugViewProvider);
     } else {
-      return new PassthroughSamplePipeline(
+      return new EncodedSamplePipeline(
          inputFormat,
          streamStartPositionUs,
          transformationRequest,

@@ -54,7 +54,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
 import org.checkerframework.dataflow.qual.Pure;
 
 /** Pipeline to process, re-encode and mux raw video frames. */
-/* package */ final class VideoTranscodingSamplePipeline extends SamplePipeline {
+/* package */ final class VideoSamplePipeline extends SamplePipeline {
 
   private final FrameProcessor frameProcessor;
   private final ColorInfo frameProcessorInputColor;
@@ -68,7 +68,7 @@ import org.checkerframework.dataflow.qual.Pure;
    */
   private volatile long finalFramePresentationTimeUs;
 
-  public VideoTranscodingSamplePipeline(
+  public VideoSamplePipeline(
       Context context,
       Format inputFormat,
       long streamStartPositionUs,
@@ -208,7 +208,7 @@ import org.checkerframework.dataflow.qual.Pure;
 
           @Override
           public void onFrameProcessingEnded() {
-            VideoTranscodingSamplePipeline.this.finalFramePresentationTimeUs =
+            VideoSamplePipeline.this.finalFramePresentationTimeUs =
                 lastProcessedFramePresentationTimeUs;
             try {
               encoderWrapper.signalEndOfInputStream();

@@ -40,7 +40,7 @@ import org.robolectric.shadows.MediaCodecInfoBuilder;
 import org.robolectric.shadows.ShadowMediaCodec;
 import org.robolectric.shadows.ShadowMediaCodecList;
 
-/** Unit tests for {@link VideoTranscodingSamplePipeline.EncoderWrapper}. */
+/** Unit tests for {@link VideoSamplePipeline.EncoderWrapper}. */
 @RunWith(AndroidJUnit4.class)
 public final class VideoEncoderWrapperTest {
   private final TransformationRequest emptyTransformationRequest =
@@ -52,8 +52,8 @@ public final class VideoEncoderWrapperTest {
           new ListenerSet<>(Looper.myLooper(), Clock.DEFAULT, (listener, flags) -> {}),
           Clock.DEFAULT.createHandler(Looper.myLooper(), /* callback= */ null),
           emptyTransformationRequest);
-  private final VideoTranscodingSamplePipeline.EncoderWrapper encoderWrapper =
-      new VideoTranscodingSamplePipeline.EncoderWrapper(
+  private final VideoSamplePipeline.EncoderWrapper encoderWrapper =
+      new VideoSamplePipeline.EncoderWrapper(
          fakeEncoderFactory,
          /* inputFormat= */ new Format.Builder().setSampleMimeType(MimeTypes.VIDEO_H264).build(),
          /* muxerSupportedMimeTypes= */ ImmutableList.of(MimeTypes.VIDEO_H264),