Add basic SingleInputVideoGraph by wrapping DefaultVideoFrameProcessor
SingleInputVideoGraph implements GraphInput now, so the asset loaders interface directly with SingleInputVideoGraph rather than VideoSampleExporter. This paves the way for multi-asset video processing. PiperOrigin-RevId: 547561042
This commit is contained in:
parent
ab904bde2d
commit
18033c9c1b
@ -0,0 +1,251 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2023 The Android Open Source Project
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* https://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package androidx.media3.transformer;
|
||||||
|
|
||||||
|
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
|
||||||
|
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
|
||||||
|
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
|
||||||
|
import static androidx.media3.common.util.Assertions.checkNotNull;
|
||||||
|
|
||||||
|
import android.content.Context;
|
||||||
|
import android.graphics.Bitmap;
|
||||||
|
import android.view.Surface;
|
||||||
|
import androidx.annotation.Nullable;
|
||||||
|
import androidx.media3.common.ColorInfo;
|
||||||
|
import androidx.media3.common.DebugViewProvider;
|
||||||
|
import androidx.media3.common.Effect;
|
||||||
|
import androidx.media3.common.Format;
|
||||||
|
import androidx.media3.common.FrameInfo;
|
||||||
|
import androidx.media3.common.MimeTypes;
|
||||||
|
import androidx.media3.common.OnInputFrameProcessedListener;
|
||||||
|
import androidx.media3.common.SurfaceInfo;
|
||||||
|
import androidx.media3.common.VideoFrameProcessingException;
|
||||||
|
import androidx.media3.common.VideoFrameProcessor;
|
||||||
|
import androidx.media3.common.util.Consumer;
|
||||||
|
import androidx.media3.common.util.Size;
|
||||||
|
import androidx.media3.effect.Presentation;
|
||||||
|
import com.google.common.collect.ImmutableList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.concurrent.Executor;
|
||||||
|
import java.util.concurrent.atomic.AtomicLong;
|
||||||
|
|
||||||
|
/** Processes decoded video frames from one single input. */
/* package */ final class SingleInputVideoGraph implements GraphInput {

  /**
   * Listener for video frame processing events.
   *
   * <p>The methods are called from the GL thread.
   */
  public interface Listener {
    /**
     * Called when the output size changes.
     *
     * @param width The new output width in pixels.
     * @param height The new output height in pixels.
     * @return A {@link SurfaceInfo} to which {@link SingleInputVideoGraph} renders, or {@code
     *     null} if the output is not needed.
     */
    @Nullable
    SurfaceInfo onOutputSizeChanged(int width, int height);

    /** Called after the {@link SingleInputVideoGraph} has rendered its final output frame. */
    void onEnded(long finalFramePresentationTimeUs);
  }

  private final VideoFrameProcessor videoFrameProcessor;
  // Sum of the durations of all media items queued so far. Added to each new item's frame
  // timestamps (via FrameInfo.offsetToAddUs) so timestamps increase monotonically across items.
  private final AtomicLong mediaItemOffsetUs;
  private final ColorInfo inputColorInfo;

  @Nullable final Presentation presentation;

  // Set on the GL thread (in onOutputFrameAvailableForRendering) and read from other threads via
  // hasProducedFrameWithTimestampZero(); hence volatile.
  private volatile boolean hasProducedFrameWithTimestampZero;

  /**
   * Creates a new instance.
   *
   * @param context A {@link Context}.
   * @param videoFrameProcessorFactory A {@link VideoFrameProcessor.Factory}.
   * @param inputColorInfo The {@link ColorInfo} for the input frames.
   * @param outputColorInfo The {@link ColorInfo} for the output frames.
   * @param listener A {@link Listener}.
   * @param errorConsumer A {@link Consumer} of {@link ExportException}.
   * @param debugViewProvider A {@link DebugViewProvider}.
   * @param listenerExecutor An {@link Executor} on which {@link VideoFrameProcessor.Listener}
   *     methods are called.
   * @param renderFramesAutomatically Whether to automatically render output frames. Use {@code
   *     false} when controlling the presentation of output frames.
   * @param presentation A {@link Presentation} to apply to processed frames.
   * @throws VideoFrameProcessingException When video frame processing fails.
   */
  public SingleInputVideoGraph(
      Context context,
      VideoFrameProcessor.Factory videoFrameProcessorFactory,
      ColorInfo inputColorInfo,
      ColorInfo outputColorInfo,
      Listener listener,
      Consumer<ExportException> errorConsumer,
      DebugViewProvider debugViewProvider,
      Executor listenerExecutor,
      boolean renderFramesAutomatically,
      @Nullable Presentation presentation)
      throws VideoFrameProcessingException {
    this.mediaItemOffsetUs = new AtomicLong();
    this.inputColorInfo = inputColorInfo;
    this.presentation = presentation;

    videoFrameProcessor =
        videoFrameProcessorFactory.create(
            context,
            debugViewProvider,
            inputColorInfo,
            outputColorInfo,
            renderFramesAutomatically,
            listenerExecutor,
            new VideoFrameProcessor.Listener() {
              // Tracks the most recent output frame timestamp so onEnded can report the final
              // frame's presentation time. Only accessed on the GL thread.
              private long lastProcessedFramePresentationTimeUs;

              @Override
              public void onOutputSizeChanged(int width, int height) {
                // TODO: b/289986435 - Allow setting output surface info on VideoGraph.
                // The external listener supplies the output surface (may be null if not needed).
                checkNotNull(videoFrameProcessor)
                    .setOutputSurfaceInfo(listener.onOutputSizeChanged(width, height));
              }

              @Override
              public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
                // Frames are rendered automatically.
                if (presentationTimeUs == 0) {
                  hasProducedFrameWithTimestampZero = true;
                }
                lastProcessedFramePresentationTimeUs = presentationTimeUs;
              }

              @Override
              public void onError(VideoFrameProcessingException exception) {
                errorConsumer.accept(
                    ExportException.createForVideoFrameProcessingException(exception));
              }

              @Override
              public void onEnded() {
                listener.onEnded(lastProcessedFramePresentationTimeUs);
              }
            });
  }

  @Override
  public void onMediaItemChanged(
      EditedMediaItem editedMediaItem,
      long durationUs,
      @Nullable Format trackFormat,
      boolean isLast) {
    if (trackFormat != null) {
      Size decodedSize = getDecodedSize(trackFormat);
      videoFrameProcessor.registerInputStream(
          getInputType(checkNotNull(trackFormat.sampleMimeType)),
          createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation));
      videoFrameProcessor.setInputFrameInfo(
          new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
              .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
              .setOffsetToAddUs(mediaItemOffsetUs.get())
              .build());
    }
    // Advance the offset even for items without a video track so that subsequent items'
    // timestamps remain correctly shifted.
    mediaItemOffsetUs.addAndGet(durationUs);
  }

  @Override
  public boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
    // Always accepts the bitmap; hence unconditionally returns true.
    videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
    return true;
  }

  @Override
  public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
    videoFrameProcessor.setOnInputFrameProcessedListener(listener);
  }

  @Override
  public boolean queueInputTexture(int texId, long presentationTimeUs) {
    // Always accepts the texture; hence unconditionally returns true.
    videoFrameProcessor.queueInputTexture(texId, presentationTimeUs);
    return true;
  }

  @Override
  public Surface getInputSurface() {
    return videoFrameProcessor.getInputSurface();
  }

  @Override
  public ColorInfo getExpectedInputColorInfo() {
    return inputColorInfo;
  }

  @Override
  public int getPendingVideoFrameCount() {
    return videoFrameProcessor.getPendingInputFrameCount();
  }

  @Override
  public boolean registerVideoFrame(long presentationTimeUs) {
    // presentationTimeUs is unused: the underlying processor derives timestamps itself.
    videoFrameProcessor.registerInputFrame();
    return true;
  }

  @Override
  public void signalEndOfVideoInput() {
    videoFrameProcessor.signalEndOfInput();
  }

  /** Returns whether an output frame with presentation time zero has been produced. */
  /* package */ boolean hasProducedFrameWithTimestampZero() {
    return hasProducedFrameWithTimestampZero;
  }

  /** Releases the underlying {@link VideoFrameProcessor}. */
  public void release() {
    videoFrameProcessor.release();
  }

  /** Maps a sample MIME type to the matching {@link VideoFrameProcessor.InputType}. */
  private static @VideoFrameProcessor.InputType int getInputType(String sampleMimeType) {
    if (MimeTypes.isImage(sampleMimeType)) {
      return INPUT_TYPE_BITMAP;
    }
    // Raw video is fed as OpenGL textures rather than through a decoder surface.
    if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
      return INPUT_TYPE_TEXTURE_ID;
    }
    if (MimeTypes.isVideo(sampleMimeType)) {
      return INPUT_TYPE_SURFACE;
    }
    throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
  }

  /** Returns the frame size output by the decoder, accounting for rotation. */
  private static Size getDecodedSize(Format format) {
    // The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
    int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height;
    int decodedHeight = (format.rotationDegrees % 180 == 0) ? format.height : format.width;
    return new Size(decodedWidth, decodedHeight);
  }

  /**
   * Returns {@code effects} with {@code presentation}, if any, appended as the last effect so it
   * is applied after all per-item effects.
   */
  private static ImmutableList<Effect> createEffectListWithPresentation(
      List<Effect> effects, @Nullable Presentation presentation) {
    if (presentation == null) {
      return ImmutableList.copyOf(effects);
    }
    ImmutableList.Builder<Effect> effectsWithPresentationBuilder = new ImmutableList.Builder<>();
    effectsWithPresentationBuilder.addAll(effects).add(presentation);
    return effectsWithPresentationBuilder.build();
  }
}
|
@ -19,9 +19,6 @@ package androidx.media3.transformer;
|
|||||||
import static androidx.media3.common.ColorInfo.SDR_BT709_LIMITED;
|
import static androidx.media3.common.ColorInfo.SDR_BT709_LIMITED;
|
||||||
import static androidx.media3.common.ColorInfo.SRGB_BT709_FULL;
|
import static androidx.media3.common.ColorInfo.SRGB_BT709_FULL;
|
||||||
import static androidx.media3.common.ColorInfo.isTransferHdr;
|
import static androidx.media3.common.ColorInfo.isTransferHdr;
|
||||||
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_BITMAP;
|
|
||||||
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_SURFACE;
|
|
||||||
import static androidx.media3.common.VideoFrameProcessor.INPUT_TYPE_TEXTURE_ID;
|
|
||||||
import static androidx.media3.common.util.Assertions.checkArgument;
|
import static androidx.media3.common.util.Assertions.checkArgument;
|
||||||
import static androidx.media3.common.util.Assertions.checkNotNull;
|
import static androidx.media3.common.util.Assertions.checkNotNull;
|
||||||
import static androidx.media3.transformer.Composition.HDR_MODE_KEEP_HDR;
|
import static androidx.media3.transformer.Composition.HDR_MODE_KEEP_HDR;
|
||||||
@ -30,7 +27,6 @@ import static androidx.media3.transformer.Composition.HDR_MODE_TONE_MAP_HDR_TO_S
|
|||||||
import static androidx.media3.transformer.EncoderUtil.getSupportedEncodersForHdrEditing;
|
import static androidx.media3.transformer.EncoderUtil.getSupportedEncodersForHdrEditing;
|
||||||
|
|
||||||
import android.content.Context;
|
import android.content.Context;
|
||||||
import android.graphics.Bitmap;
|
|
||||||
import android.media.MediaCodec;
|
import android.media.MediaCodec;
|
||||||
import android.media.MediaCodecInfo;
|
import android.media.MediaCodecInfo;
|
||||||
import android.util.Pair;
|
import android.util.Pair;
|
||||||
@ -40,17 +36,13 @@ import androidx.annotation.VisibleForTesting;
|
|||||||
import androidx.media3.common.C;
|
import androidx.media3.common.C;
|
||||||
import androidx.media3.common.ColorInfo;
|
import androidx.media3.common.ColorInfo;
|
||||||
import androidx.media3.common.DebugViewProvider;
|
import androidx.media3.common.DebugViewProvider;
|
||||||
import androidx.media3.common.Effect;
|
|
||||||
import androidx.media3.common.Format;
|
import androidx.media3.common.Format;
|
||||||
import androidx.media3.common.FrameInfo;
|
|
||||||
import androidx.media3.common.MimeTypes;
|
import androidx.media3.common.MimeTypes;
|
||||||
import androidx.media3.common.OnInputFrameProcessedListener;
|
|
||||||
import androidx.media3.common.SurfaceInfo;
|
import androidx.media3.common.SurfaceInfo;
|
||||||
import androidx.media3.common.VideoFrameProcessingException;
|
import androidx.media3.common.VideoFrameProcessingException;
|
||||||
import androidx.media3.common.VideoFrameProcessor;
|
import androidx.media3.common.VideoFrameProcessor;
|
||||||
import androidx.media3.common.util.Consumer;
|
import androidx.media3.common.util.Consumer;
|
||||||
import androidx.media3.common.util.Log;
|
import androidx.media3.common.util.Log;
|
||||||
import androidx.media3.common.util.Size;
|
|
||||||
import androidx.media3.common.util.Util;
|
import androidx.media3.common.util.Util;
|
||||||
import androidx.media3.decoder.DecoderInputBuffer;
|
import androidx.media3.decoder.DecoderInputBuffer;
|
||||||
import androidx.media3.effect.DebugTraceUtil;
|
import androidx.media3.effect.DebugTraceUtil;
|
||||||
@ -60,23 +52,15 @@ import com.google.common.collect.ImmutableList;
|
|||||||
import com.google.common.util.concurrent.MoreExecutors;
|
import com.google.common.util.concurrent.MoreExecutors;
|
||||||
import java.nio.ByteBuffer;
|
import java.nio.ByteBuffer;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.concurrent.atomic.AtomicLong;
|
|
||||||
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
|
||||||
import org.checkerframework.dataflow.qual.Pure;
|
import org.checkerframework.dataflow.qual.Pure;
|
||||||
|
|
||||||
// TODO: b/289986435 - Remove implementations of GraphInput after creating VideoGraph.
|
|
||||||
/** Processes, encodes and muxes raw video frames. */
|
/** Processes, encodes and muxes raw video frames. */
|
||||||
/* package */ final class VideoSampleExporter extends SampleExporter implements GraphInput {
|
/* package */ final class VideoSampleExporter extends SampleExporter {
|
||||||
|
|
||||||
private static final String TAG = "VideoSampleExporter";
|
private static final String TAG = "VideoSampleExporter";
|
||||||
private final AtomicLong mediaItemOffsetUs;
|
private final SingleInputVideoGraph singleInputVideoGraph;
|
||||||
private final VideoFrameProcessor videoFrameProcessor;
|
|
||||||
private final ColorInfo videoFrameProcessorInputColor;
|
|
||||||
private final EncoderWrapper encoderWrapper;
|
private final EncoderWrapper encoderWrapper;
|
||||||
private final DecoderInputBuffer encoderOutputBuffer;
|
|
||||||
@Nullable final Presentation presentation;
|
|
||||||
|
|
||||||
private volatile boolean encoderExpectsTimestampZero;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The timestamp of the last buffer processed before {@linkplain
|
* The timestamp of the last buffer processed before {@linkplain
|
||||||
@ -84,6 +68,8 @@ import org.checkerframework.dataflow.qual.Pure;
|
|||||||
*/
|
*/
|
||||||
private volatile long finalFramePresentationTimeUs;
|
private volatile long finalFramePresentationTimeUs;
|
||||||
|
|
||||||
|
private boolean hasMuxedTimestampZero;
|
||||||
|
|
||||||
public VideoSampleExporter(
|
public VideoSampleExporter(
|
||||||
Context context,
|
Context context,
|
||||||
Format firstInputFormat,
|
Format firstInputFormat,
|
||||||
@ -100,12 +86,8 @@ import org.checkerframework.dataflow.qual.Pure;
|
|||||||
// output format instead of the extractor output format, to match AudioSampleExporter behavior.
|
// output format instead of the extractor output format, to match AudioSampleExporter behavior.
|
||||||
super(firstInputFormat, muxerWrapper);
|
super(firstInputFormat, muxerWrapper);
|
||||||
|
|
||||||
mediaItemOffsetUs = new AtomicLong();
|
|
||||||
finalFramePresentationTimeUs = C.TIME_UNSET;
|
finalFramePresentationTimeUs = C.TIME_UNSET;
|
||||||
|
|
||||||
encoderOutputBuffer =
|
|
||||||
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
|
|
||||||
|
|
||||||
ColorInfo decoderInputColor;
|
ColorInfo decoderInputColor;
|
||||||
if (firstInputFormat.colorInfo == null || !firstInputFormat.colorInfo.isValid()) {
|
if (firstInputFormat.colorInfo == null || !firstInputFormat.colorInfo.isValid()) {
|
||||||
Log.d(TAG, "colorInfo is null or invalid. Defaulting to SDR_BT709_LIMITED.");
|
Log.d(TAG, "colorInfo is null or invalid. Defaulting to SDR_BT709_LIMITED.");
|
||||||
@ -125,7 +107,8 @@ import org.checkerframework.dataflow.qual.Pure;
|
|||||||
boolean isMediaCodecToneMapping =
|
boolean isMediaCodecToneMapping =
|
||||||
encoderWrapper.getHdrModeAfterFallback() == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC
|
encoderWrapper.getHdrModeAfterFallback() == HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC
|
||||||
&& ColorInfo.isTransferHdr(decoderInputColor);
|
&& ColorInfo.isTransferHdr(decoderInputColor);
|
||||||
videoFrameProcessorInputColor = isMediaCodecToneMapping ? SDR_BT709_LIMITED : decoderInputColor;
|
ColorInfo videoFrameProcessorInputColor =
|
||||||
|
isMediaCodecToneMapping ? SDR_BT709_LIMITED : decoderInputColor;
|
||||||
|
|
||||||
boolean isGlToneMapping =
|
boolean isGlToneMapping =
|
||||||
ColorInfo.isTransferHdr(decoderInputColor)
|
ColorInfo.isTransferHdr(decoderInputColor)
|
||||||
@ -149,131 +132,55 @@ import org.checkerframework.dataflow.qual.Pure;
|
|||||||
videoFrameProcessorOutputColor = videoFrameProcessorInputColor;
|
videoFrameProcessorOutputColor = videoFrameProcessorInputColor;
|
||||||
}
|
}
|
||||||
|
|
||||||
this.presentation = presentation;
|
|
||||||
try {
|
try {
|
||||||
videoFrameProcessor =
|
singleInputVideoGraph =
|
||||||
videoFrameProcessorFactory.create(
|
new SingleInputVideoGraph(
|
||||||
context,
|
context,
|
||||||
debugViewProvider,
|
videoFrameProcessorFactory,
|
||||||
videoFrameProcessorInputColor,
|
videoFrameProcessorInputColor,
|
||||||
videoFrameProcessorOutputColor,
|
videoFrameProcessorOutputColor,
|
||||||
/* renderFramesAutomatically= */ true,
|
new SingleInputVideoGraph.Listener() {
|
||||||
MoreExecutors.directExecutor(),
|
@Nullable
|
||||||
new VideoFrameProcessor.Listener() {
|
|
||||||
private long lastProcessedFramePresentationTimeUs;
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void onOutputSizeChanged(int width, int height) {
|
public SurfaceInfo onOutputSizeChanged(int width, int height) {
|
||||||
|
@Nullable SurfaceInfo surfaceInfo = null;
|
||||||
try {
|
try {
|
||||||
checkNotNull(videoFrameProcessor)
|
surfaceInfo = encoderWrapper.getSurfaceInfo(width, height);
|
||||||
.setOutputSurfaceInfo(encoderWrapper.getSurfaceInfo(width, height));
|
} catch (ExportException e) {
|
||||||
} catch (ExportException exception) {
|
errorConsumer.accept(e);
|
||||||
errorConsumer.accept(exception);
|
|
||||||
}
|
}
|
||||||
|
return surfaceInfo;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void onOutputFrameAvailableForRendering(long presentationTimeUs) {
|
public void onEnded(long finalFramePresentationTimeUs) {
|
||||||
// Frames are rendered automatically.
|
|
||||||
if (presentationTimeUs == 0) {
|
|
||||||
encoderExpectsTimestampZero = true;
|
|
||||||
}
|
|
||||||
lastProcessedFramePresentationTimeUs = presentationTimeUs;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void onError(VideoFrameProcessingException exception) {
|
|
||||||
errorConsumer.accept(
|
|
||||||
ExportException.createForVideoFrameProcessingException(exception));
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void onEnded() {
|
|
||||||
VideoSampleExporter.this.finalFramePresentationTimeUs =
|
VideoSampleExporter.this.finalFramePresentationTimeUs =
|
||||||
lastProcessedFramePresentationTimeUs;
|
finalFramePresentationTimeUs;
|
||||||
try {
|
try {
|
||||||
encoderWrapper.signalEndOfInputStream();
|
encoderWrapper.signalEndOfInputStream();
|
||||||
} catch (ExportException exception) {
|
} catch (ExportException e) {
|
||||||
errorConsumer.accept(exception);
|
errorConsumer.accept(e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
},
|
||||||
|
errorConsumer,
|
||||||
|
debugViewProvider,
|
||||||
|
MoreExecutors.directExecutor(),
|
||||||
|
/* renderFramesAutomatically= */ true,
|
||||||
|
presentation);
|
||||||
} catch (VideoFrameProcessingException e) {
|
} catch (VideoFrameProcessingException e) {
|
||||||
throw ExportException.createForVideoFrameProcessingException(e);
|
throw ExportException.createForVideoFrameProcessingException(e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public void onMediaItemChanged(
|
|
||||||
EditedMediaItem editedMediaItem,
|
|
||||||
long durationUs,
|
|
||||||
@Nullable Format trackFormat,
|
|
||||||
boolean isLast) {
|
|
||||||
if (trackFormat != null) {
|
|
||||||
Size decodedSize = getDecodedSize(trackFormat);
|
|
||||||
videoFrameProcessor.registerInputStream(
|
|
||||||
getInputType(checkNotNull(trackFormat.sampleMimeType)),
|
|
||||||
createEffectListWithPresentation(editedMediaItem.effects.videoEffects, presentation));
|
|
||||||
videoFrameProcessor.setInputFrameInfo(
|
|
||||||
new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
|
|
||||||
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
|
|
||||||
.setOffsetToAddUs(mediaItemOffsetUs.get())
|
|
||||||
.build());
|
|
||||||
}
|
|
||||||
mediaItemOffsetUs.addAndGet(durationUs);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
|
|
||||||
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
|
|
||||||
videoFrameProcessor.setOnInputFrameProcessedListener(listener);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean queueInputTexture(int texId, long presentationTimeUs) {
|
|
||||||
videoFrameProcessor.queueInputTexture(texId, presentationTimeUs);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Surface getInputSurface() {
|
|
||||||
return videoFrameProcessor.getInputSurface();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ColorInfo getExpectedInputColorInfo() {
|
|
||||||
return videoFrameProcessorInputColor;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean registerVideoFrame(long presentationTimeUs) {
|
|
||||||
videoFrameProcessor.registerInputFrame();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int getPendingVideoFrameCount() {
|
|
||||||
return videoFrameProcessor.getPendingInputFrameCount();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void signalEndOfVideoInput() {
|
|
||||||
videoFrameProcessor.signalEndOfInput();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public GraphInput getInput() {
|
public GraphInput getInput() {
|
||||||
return this;
|
return singleInputVideoGraph;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void release() {
|
public void release() {
|
||||||
videoFrameProcessor.release();
|
singleInputVideoGraph.release();
|
||||||
encoderWrapper.release();
|
encoderWrapper.release();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -286,6 +193,8 @@ import org.checkerframework.dataflow.qual.Pure;
|
|||||||
@Override
|
@Override
|
||||||
@Nullable
|
@Nullable
|
||||||
protected DecoderInputBuffer getMuxerInputBuffer() throws ExportException {
|
protected DecoderInputBuffer getMuxerInputBuffer() throws ExportException {
|
||||||
|
DecoderInputBuffer encoderOutputBuffer =
|
||||||
|
new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
|
||||||
encoderOutputBuffer.data = encoderWrapper.getOutputBuffer();
|
encoderOutputBuffer.data = encoderWrapper.getOutputBuffer();
|
||||||
if (encoderOutputBuffer.data == null) {
|
if (encoderOutputBuffer.data == null) {
|
||||||
return null;
|
return null;
|
||||||
@ -295,10 +204,12 @@ import org.checkerframework.dataflow.qual.Pure;
|
|||||||
// Internal ref b/235045165: Some encoder incorrectly set a zero presentation time on the
|
// Internal ref b/235045165: Some encoder incorrectly set a zero presentation time on the
|
||||||
// penultimate buffer (before EOS), and sets the actual timestamp on the EOS buffer. Use the
|
// penultimate buffer (before EOS), and sets the actual timestamp on the EOS buffer. Use the
|
||||||
// last processed frame presentation time instead.
|
// last processed frame presentation time instead.
|
||||||
if (encoderExpectsTimestampZero) {
|
if (singleInputVideoGraph.hasProducedFrameWithTimestampZero() == hasMuxedTimestampZero
|
||||||
encoderExpectsTimestampZero = false;
|
&& finalFramePresentationTimeUs != C.TIME_UNSET
|
||||||
} else if (finalFramePresentationTimeUs != C.TIME_UNSET && bufferInfo.size > 0) {
|
&& bufferInfo.size > 0) {
|
||||||
bufferInfo.presentationTimeUs = finalFramePresentationTimeUs;
|
bufferInfo.presentationTimeUs = finalFramePresentationTimeUs;
|
||||||
|
} else {
|
||||||
|
hasMuxedTimestampZero = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
DebugTraceUtil.recordEncodedFrame();
|
DebugTraceUtil.recordEncodedFrame();
|
||||||
@ -317,36 +228,6 @@ import org.checkerframework.dataflow.qual.Pure;
|
|||||||
return encoderWrapper.isEnded();
|
return encoderWrapper.isEnded();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static @VideoFrameProcessor.InputType int getInputType(String sampleMimeType) {
|
|
||||||
if (MimeTypes.isImage(sampleMimeType)) {
|
|
||||||
return INPUT_TYPE_BITMAP;
|
|
||||||
}
|
|
||||||
if (sampleMimeType.equals(MimeTypes.VIDEO_RAW)) {
|
|
||||||
return INPUT_TYPE_TEXTURE_ID;
|
|
||||||
}
|
|
||||||
if (MimeTypes.isVideo(sampleMimeType)) {
|
|
||||||
return INPUT_TYPE_SURFACE;
|
|
||||||
}
|
|
||||||
throw new IllegalArgumentException("MIME type not supported " + sampleMimeType);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static Size getDecodedSize(Format format) {
|
|
||||||
// The decoder rotates encoded frames for display by firstInputFormat.rotationDegrees.
|
|
||||||
int decodedWidth = (format.rotationDegrees % 180 == 0) ? format.width : format.height;
|
|
||||||
int decodedHeight = (format.rotationDegrees % 180 == 0) ? format.height : format.width;
|
|
||||||
return new Size(decodedWidth, decodedHeight);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static ImmutableList<Effect> createEffectListWithPresentation(
|
|
||||||
List<Effect> effects, @Nullable Presentation presentation) {
|
|
||||||
if (presentation == null) {
|
|
||||||
return ImmutableList.copyOf(effects);
|
|
||||||
}
|
|
||||||
ImmutableList.Builder<Effect> effectsWithPresentationBuilder = new ImmutableList.Builder<>();
|
|
||||||
effectsWithPresentationBuilder.addAll(effects).add(presentation);
|
|
||||||
return effectsWithPresentationBuilder.build();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Wraps an {@linkplain Codec encoder} and provides its input {@link Surface}.
|
* Wraps an {@linkplain Codec encoder} and provides its input {@link Surface}.
|
||||||
*
|
*
|
||||||
|
Loading…
x
Reference in New Issue
Block a user