Update sample pipelines and frame processors to handle image input.

PiperOrigin-RevId: 506965394
This commit is contained in:
tofunmi 2023-02-03 20:21:06 +00:00 committed by microkatz
parent c45859dde1
commit 4e3c6c6167
4 changed files with 41 additions and 8 deletions

View File

@@ -16,6 +16,7 @@
package androidx.media3.common; package androidx.media3.common;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.EGLExt; import android.opengl.EGLExt;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
@@ -123,6 +124,20 @@ public interface FrameProcessor {
/** Indicates the frame should be dropped after {@link #releaseOutputFrame(long)} is invoked. */ /** Indicates the frame should be dropped after {@link #releaseOutputFrame(long)} is invoked. */
long DROP_OUTPUT_FRAME = -2; long DROP_OUTPUT_FRAME = -2;
/**
 * Provides an input {@link Bitmap} to the {@link FrameProcessor}.
 *
 * <p>Can be called on any thread.
 *
 * @param inputBitmap The {@link Bitmap} queued to the {@link FrameProcessor}.
 * @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
 * @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
 *     second.
 */
// TODO(b/262693274): Remove duration & frameRate parameters when EditedMediaItem can be signalled
// down to the processors.
void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate);
/** /**
* Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from. * Returns the input {@link Surface}, where {@link FrameProcessor} consumes input frames from.
* *

View File

@@ -21,6 +21,7 @@ import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static com.google.common.collect.Iterables.getLast; import static com.google.common.collect.Iterables.getLast;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.EGLDisplay; import android.opengl.EGLDisplay;
@@ -409,6 +410,9 @@ public final class GlEffectsFrameProcessor implements FrameProcessor {
inputExternalTextureManager.getSurfaceTexture().setDefaultBufferSize(width, height); inputExternalTextureManager.getSurfaceTexture().setDefaultBufferSize(width, height);
} }
@Override
// NOTE(review): intentional no-op — bitmap input is not yet implemented here, so any queued
// bitmap is silently dropped. Presumably a stub added so the new interface method compiles
// while GL bitmap support is built out incrementally — TODO confirm and implement.
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {}
@Override @Override
public Surface getInputSurface() { public Surface getInputSurface() {
return inputSurface; return inputSurface;

View File

@@ -34,7 +34,7 @@ import androidx.media3.decoder.DecoderInputBuffer;
private final long streamStartPositionUs; private final long streamStartPositionUs;
private final MuxerWrapper muxerWrapper; private final MuxerWrapper muxerWrapper;
private final @C.TrackType int trackType; private final @C.TrackType int outputTrackType;
private boolean muxerWrapperTrackAdded; private boolean muxerWrapperTrackAdded;
@@ -42,7 +42,10 @@ import androidx.media3.decoder.DecoderInputBuffer;
Format firstInputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) { Format firstInputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) {
this.streamStartPositionUs = streamStartPositionUs; this.streamStartPositionUs = streamStartPositionUs;
this.muxerWrapper = muxerWrapper; this.muxerWrapper = muxerWrapper;
trackType = MimeTypes.getTrackType(firstInputFormat.sampleMimeType); outputTrackType =
MimeTypes.isImage(firstInputFormat.sampleMimeType)
? C.TRACK_TYPE_VIDEO
: MimeTypes.getTrackType(firstInputFormat.sampleMimeType);
} }
protected static TransformationException createNoSupportedMimeTypeException(Format format) { protected static TransformationException createNoSupportedMimeTypeException(Format format) {
@@ -113,7 +116,7 @@ import androidx.media3.decoder.DecoderInputBuffer;
} }
if (isMuxerInputEnded()) { if (isMuxerInputEnded()) {
muxerWrapper.endTrack(trackType); muxerWrapper.endTrack(outputTrackType);
return false; return false;
} }
@@ -127,7 +130,7 @@ import androidx.media3.decoder.DecoderInputBuffer;
// buffer from all samples so that they are guaranteed to start from zero in the output file. // buffer from all samples so that they are guaranteed to start from zero in the output file.
try { try {
if (!muxerWrapper.writeSample( if (!muxerWrapper.writeSample(
trackType, outputTrackType,
checkStateNotNull(muxerInputBuffer.data), checkStateNotNull(muxerInputBuffer.data),
muxerInputBuffer.isKeyFrame(), muxerInputBuffer.isKeyFrame(),
samplePresentationTimeUs)) { samplePresentationTimeUs)) {

View File

@@ -25,6 +25,7 @@ import static androidx.media3.transformer.TransformationRequest.HDR_MODE_TONE_MA
import static androidx.media3.transformer.TransformationRequest.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL; import static androidx.media3.transformer.TransformationRequest.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap;
import android.media.MediaCodec; import android.media.MediaCodec;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
@@ -204,6 +205,11 @@ import org.checkerframework.dataflow.qual.Pure;
new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build()); new FrameInfo.Builder(firstFrameInfo).setOffsetToAddUs(mediaItemOffsetUs).build());
} }
@Override
// Pure pass-through: forwards the bitmap (with its display duration and frame rate) to the
// wrapped frameProcessor unchanged.
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
frameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
}
@Override @Override
public Surface getInputSurface() { public Surface getInputSurface() {
return frameProcessor.getInputSurface(); return frameProcessor.getInputSurface();
@@ -352,10 +358,15 @@ import org.checkerframework.dataflow.qual.Pure;
this.transformationRequest = transformationRequest; this.transformationRequest = transformationRequest;
this.fallbackListener = fallbackListener; this.fallbackListener = fallbackListener;
requestedOutputMimeType = String inputSampleMimeType = checkNotNull(inputFormat.sampleMimeType);
transformationRequest.videoMimeType != null
? transformationRequest.videoMimeType if (transformationRequest.videoMimeType != null) {
: checkNotNull(inputFormat.sampleMimeType); requestedOutputMimeType = transformationRequest.videoMimeType;
} else if (MimeTypes.isImage(inputSampleMimeType)) {
requestedOutputMimeType = MimeTypes.VIDEO_H265;
} else {
requestedOutputMimeType = inputSampleMimeType;
}
supportedEncoderNamesForHdrEditing = supportedEncoderNamesForHdrEditing =
EncoderUtil.getSupportedEncoderNamesForHdrEditing( EncoderUtil.getSupportedEncoderNamesForHdrEditing(
requestedOutputMimeType, inputFormat.colorInfo); requestedOutputMimeType, inputFormat.colorInfo);