Integrate ImageRenderer with composition preview
PiperOrigin-RevId: 591218013
This commit is contained in: parent 7f6596bab2, commit 7df3e9e779
@@ -33,11 +33,12 @@ public final class ConstantRateTimestampIterator implements TimestampIterator {
   private final long durationUs;
   private final float frameRate;
   private final double framesDurationUs;
+  private final long startingTimestampUs;
   private double currentTimestampUs;
   private int framesToAdd;
 
   /**
-   * Creates an instance.
+   * Creates an instance that outputs timestamps from {@code 0}.
    *
    * @param durationUs The duration the timestamps should span over, in microseconds.
    * @param frameRate The frame rate in frames per second.
@@ -45,10 +46,27 @@ public final class ConstantRateTimestampIterator implements TimestampIterator {
   public ConstantRateTimestampIterator(
       @IntRange(from = 1) long durationUs,
       @FloatRange(from = 0, fromInclusive = false) float frameRate) {
+    this(durationUs, frameRate, /* startingTimestampUs= */ 0);
+  }
+
+  /**
+   * Creates an instance that outputs timestamps from {@code startingTimestampUs}.
+   *
+   * @param durationUs The duration the timestamps should span over, in microseconds.
+   * @param frameRate The frame rate in frames per second.
+   * @param startingTimestampUs The first timestamp output from the iterator.
+   */
+  public ConstantRateTimestampIterator(
+      @IntRange(from = 1) long durationUs,
+      @FloatRange(from = 0, fromInclusive = false) float frameRate,
+      @IntRange(from = 0) long startingTimestampUs) {
     checkArgument(durationUs > 0);
     checkArgument(frameRate > 0);
+    checkArgument(startingTimestampUs >= 0);
     this.durationUs = durationUs;
     this.frameRate = frameRate;
+    this.startingTimestampUs = startingTimestampUs;
+    this.currentTimestampUs = startingTimestampUs;
     framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
     framesDurationUs = C.MICROS_PER_SECOND / frameRate;
   }
@@ -69,6 +87,6 @@ public final class ConstantRateTimestampIterator implements TimestampIterator {
 
   @Override
   public ConstantRateTimestampIterator copyOf() {
-    return new ConstantRateTimestampIterator(durationUs, frameRate);
+    return new ConstantRateTimestampIterator(durationUs, frameRate, startingTimestampUs);
   }
 }
@@ -71,6 +71,28 @@ public class ConstantRateTimestampIteratorTest {
     assertThat(generateList(constantRateTimestampIterator)).isEmpty();
   }
 
+  @Test
+  public void timestampIterator_withNonZeroStartingTime_firstOutputsStartingTimestamp() {
+    ConstantRateTimestampIterator constantRateTimestampIterator =
+        new ConstantRateTimestampIterator(
+            /* durationUs= */ C.MICROS_PER_SECOND,
+            /* frameRate= */ 2,
+            /* startingTimestampUs= */ 1234);
+
+    assertThat(constantRateTimestampIterator.next()).isEqualTo(1234);
+  }
+
+  @Test
+  public void copyOf_withNonZeroStartingTime_firstOutputsStartingTimestamp() {
+    ConstantRateTimestampIterator constantRateTimestampIterator =
+        new ConstantRateTimestampIterator(
+            /* durationUs= */ C.MICROS_PER_SECOND,
+            /* frameRate= */ 2,
+            /* startingTimestampUs= */ 1234);
+
+    assertThat(constantRateTimestampIterator.copyOf().next()).isEqualTo(1234);
+  }
+
   private static List<Long> generateList(TimestampIterator iterator) {
     ArrayList<Long> list = new ArrayList<>();
 
@@ -16,7 +16,6 @@
 
 package androidx.media3.effect;
 
-import static androidx.media3.common.util.Assertions.checkNotNull;
 import static androidx.media3.common.util.Assertions.checkState;
 import static androidx.media3.common.util.Assertions.checkStateNotNull;
 
@@ -50,13 +49,12 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
   private final DebugViewProvider debugViewProvider;
   private final Executor listenerExecutor;
   private final boolean renderFramesAutomatically;
-
   private final long initialTimestampOffsetUs;
   @Nullable private final Presentation presentation;
 
   @Nullable private VideoFrameProcessor videoFrameProcessor;
+  @Nullable private SurfaceInfo outputSurfaceInfo;
   private boolean isEnded;
-
   private boolean released;
   private volatile boolean hasProducedFrameWithTimestampZero;
 
@@ -162,6 +160,9 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
                     () -> listener.onEnded(lastProcessedFramePresentationTimeUs));
               }
             });
+    if (outputSurfaceInfo != null) {
+      videoFrameProcessor.setOutputSurfaceInfo(outputSurfaceInfo);
+    }
     return SINGLE_INPUT_INDEX;
   }
 
@@ -172,7 +173,10 @@ public abstract class SingleInputVideoGraph implements VideoGraph {
 
   @Override
   public void setOutputSurfaceInfo(@Nullable SurfaceInfo outputSurfaceInfo) {
-    checkNotNull(videoFrameProcessor).setOutputSurfaceInfo(outputSurfaceInfo);
+    this.outputSurfaceInfo = outputSurfaceInfo;
+    if (videoFrameProcessor != null) {
+      videoFrameProcessor.setOutputSurfaceInfo(outputSurfaceInfo);
+    }
   }
 
   @Override
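Note (not part of the diff): SingleInputVideoGraph now caches the requested SurfaceInfo and forwards it both when the surface is set and when the VideoFrameProcessor is created later, so a preview surface set before registerInput() is not lost. A minimal sketch of the same cache-then-apply pattern, using hypothetical names:

    // Hypothetical holder illustrating the pattern above: remember the latest surface info and
    // apply it to the processor whenever either side becomes available.
    final class CachedOutputSurface {
      @Nullable private SurfaceInfo outputSurfaceInfo;
      @Nullable private VideoFrameProcessor videoFrameProcessor;

      void onProcessorCreated(VideoFrameProcessor processor) {
        videoFrameProcessor = processor;
        if (outputSurfaceInfo != null) {
          processor.setOutputSurfaceInfo(outputSurfaceInfo);
        }
      }

      void setOutputSurfaceInfo(@Nullable SurfaceInfo surfaceInfo) {
        outputSurfaceInfo = surfaceInfo;
        if (videoFrameProcessor != null) {
          videoFrameProcessor.setOutputSurfaceInfo(surfaceInfo);
        }
      }
    }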
@@ -26,6 +26,7 @@ import static java.lang.Math.min;
 import static java.lang.annotation.ElementType.TYPE_USE;
 
 import android.graphics.Bitmap;
+import android.os.SystemClock;
 import androidx.annotation.IntDef;
 import androidx.media3.common.C;
 import androidx.media3.common.Format;
@@ -51,7 +52,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
 
 /** A {@link Renderer} implementation for images. */
 @UnstableApi
-public final class ImageRenderer extends BaseRenderer {
+public class ImageRenderer extends BaseRenderer {
 
   private static final String TAG = "ImageRenderer";
 
@@ -260,7 +261,7 @@ public final class ImageRenderer extends BaseRenderer {
         && getState() != STATE_STARTED) {
       return false;
     }
-    if (checkNotNull(outputBuffer).isEndOfStream()) {
+    if (checkStateNotNull(outputBuffer).isEndOfStream()) {
      offsetQueue.remove();
      if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) {
        // We're waiting to re-initialize the decoder, and have now processed all final buffers.
@@ -276,27 +277,40 @@ public final class ImageRenderer extends BaseRenderer {
      }
      return false;
    }
-    checkStateNotNull(outputBuffer);
-    if (!processOutputBuffer(positionUs, elapsedRealtimeUs)) {
+
+    ImageOutputBuffer imageOutputBuffer = checkStateNotNull(outputBuffer);
+    checkStateNotNull(
+        imageOutputBuffer.bitmap, "Non-EOS buffer came back from the decoder without bitmap.");
+    if (!processOutputBuffer(
+        positionUs, elapsedRealtimeUs, imageOutputBuffer.bitmap, imageOutputBuffer.timeUs)) {
      return false;
    }
+    checkStateNotNull(outputBuffer).release();
+    outputBuffer = null;
    firstFrameState = FIRST_FRAME_RENDERED;
    return true;
  }
 
-  @SuppressWarnings("unused") // Will be used or removed when the integrated with the videoSink.
-  @RequiresNonNull("outputBuffer")
-  private boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs) {
-    Bitmap outputBitmap =
-        checkNotNull(
-            outputBuffer.bitmap, "Non-EOS buffer came back from the decoder without bitmap.");
-    if (positionUs < outputBuffer.timeUs) {
+  /**
+   * Processes an output image.
+   *
+   * @param positionUs The current media time in microseconds, measured at the start of the current
+   *     iteration of the rendering loop.
+   * @param elapsedRealtimeUs {@link SystemClock#elapsedRealtime()} in microseconds, measured at the
+   *     start of the current iteration of the rendering loop.
+   * @param outputBitmap The {@link Bitmap}.
+   * @param bufferPresentationTimeUs The presentation time of the output buffer in microseconds.
+   * @return Whether the output image was fully processed (for example, rendered or skipped).
+   * @throws ExoPlaybackException If an error occurs processing the output buffer.
+   */
+  protected boolean processOutputBuffer(
+      long positionUs, long elapsedRealtimeUs, Bitmap outputBitmap, long bufferPresentationTimeUs)
+      throws ExoPlaybackException {
+    if (positionUs < bufferPresentationTimeUs) {
      // It's too early to render the buffer.
      return false;
    }
-    imageOutput.onImageAvailable(outputBuffer.timeUs - offsetQueue.element(), outputBitmap);
-    checkNotNull(outputBuffer).release();
-    outputBuffer = null;
+    imageOutput.onImageAvailable(bufferPresentationTimeUs - offsetQueue.element(), outputBitmap);
    return true;
  }
 
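Note (not part of the diff): ImageRenderer is no longer final and processOutputBuffer is now protected, so an integration can subclass it and redirect decoded bitmaps (for example, towards a video sink) instead of relying on ImageOutput. A minimal sketch, assuming the existing (ImageDecoder.Factory, ImageOutput) constructor; the subclass name is hypothetical:

    // Hypothetical subclass that intercepts each decoded bitmap before delegating to the
    // default ImageOutput-based rendering path.
    final class ForwardingImageRenderer extends ImageRenderer {
      ForwardingImageRenderer(ImageDecoder.Factory decoderFactory, ImageOutput imageOutput) {
        super(decoderFactory, imageOutput);
      }

      @Override
      protected boolean processOutputBuffer(
          long positionUs,
          long elapsedRealtimeUs,
          Bitmap outputBitmap,
          long bufferPresentationTimeUs)
          throws ExoPlaybackException {
        // Hand the bitmap to a custom consumer here, or fall back to the superclass behavior,
        // which reports the image to the ImageOutput once its presentation time is due.
        return super.processOutputBuffer(
            positionUs, elapsedRealtimeUs, outputBitmap, bufferPresentationTimeUs);
      }
    }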
@@ -282,6 +282,11 @@ public final class CompositingVideoSinkProvider
              /* listenerExecutor= */ handler::post,
              /* compositionEffects= */ ImmutableList.of(),
              /* initialTimestampOffsetUs= */ 0);
+      if (currentSurfaceAndSize != null) {
+        Surface surface = currentSurfaceAndSize.first;
+        Size size = currentSurfaceAndSize.second;
+        maybeSetOutputSurfaceInfo(surface, size.getWidth(), size.getHeight());
+      }
      videoSinkImpl =
          new VideoSinkImpl(context, /* compositingVideoSinkProvider= */ this, videoGraph);
    } catch (VideoFrameProcessingException e) {
@@ -346,17 +351,17 @@ public final class CompositingVideoSinkProvider
        && currentSurfaceAndSize.second.equals(outputResolution)) {
      return;
    }
-    videoFrameReleaseControl.setOutputSurface(outputSurface);
    currentSurfaceAndSize = Pair.create(outputSurface, outputResolution);
-    checkStateNotNull(videoGraph)
-        .setOutputSurfaceInfo(
-            new SurfaceInfo(
-                outputSurface, outputResolution.getWidth(), outputResolution.getHeight()));
+    maybeSetOutputSurfaceInfo(
+        outputSurface, outputResolution.getWidth(), outputResolution.getHeight());
  }
 
  @Override
  public void clearOutputSurfaceInfo() {
-    checkStateNotNull(videoGraph).setOutputSurfaceInfo(/* outputSurfaceInfo= */ null);
+    maybeSetOutputSurfaceInfo(
+        /* surface= */ null,
+        /* width= */ Size.UNKNOWN.getWidth(),
+        /* height= */ Size.UNKNOWN.getHeight());
    currentSurfaceAndSize = null;
  }
 
@@ -455,6 +460,31 @@ public final class CompositingVideoSinkProvider
    checkStateNotNull(videoGraph).renderOutputFrame(VideoFrameProcessor.DROP_OUTPUT_FRAME);
  }
 
+  // Other public methods
+
+  /**
+   * Incrementally renders available video frames.
+   *
+   * @param positionUs The current playback position, in microseconds.
+   * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
+   *     taken approximately at the time the playback position was {@code positionUs}.
+   */
+  public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
+    if (pendingFlushCount == 0) {
+      videoFrameRenderControl.render(positionUs, elapsedRealtimeUs);
+    }
+  }
+
+  /**
+   * Returns the output surface that was {@linkplain #setOutputSurfaceInfo(Surface, Size) set}, or
+   * {@code null} if no surface is set or the surface is {@linkplain #clearOutputSurfaceInfo()
+   * cleared}.
+   */
+  @Nullable
+  public Surface getOutputSurface() {
+    return currentSurfaceAndSize != null ? currentSurfaceAndSize.first : null;
+  }
+
  // Internal methods
 
  private void setListener(VideoSink.Listener listener, Executor executor) {
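Note (not part of the diff): render(positionUs, elapsedRealtimeUs) is now public on the provider, so a renderer can drive the sink from its own rendering loop. A hypothetical sketch of such a caller (the field name and surrounding renderer are assumptions):

    // Hypothetical renderer method: forward each rendering-loop iteration to the shared provider,
    // which releases frames via its VideoFrameRenderControl unless a flush is pending.
    @Override
    public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
      // ... feed input and handle stream changes first ...
      compositingVideoSinkProvider.render(positionUs, elapsedRealtimeUs);
    }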
@@ -467,6 +497,15 @@ public final class CompositingVideoSinkProvider
    this.listenerExecutor = executor;
  }
 
+  private void maybeSetOutputSurfaceInfo(@Nullable Surface surface, int width, int height) {
+    if (videoGraph != null) {
+      // Update the surface on the video graph and the video frame release control together.
+      SurfaceInfo surfaceInfo = surface != null ? new SurfaceInfo(surface, width, height) : null;
+      videoGraph.setOutputSurfaceInfo(surfaceInfo);
+      videoFrameReleaseControl.setOutputSurface(surface);
+    }
+  }
+
  private boolean isReady() {
    return pendingFlushCount == 0 && videoFrameRenderControl.isReady();
  }
@@ -475,12 +514,6 @@ public final class CompositingVideoSinkProvider
    return pendingFlushCount == 0 && videoFrameRenderControl.hasReleasedFrame(presentationTimeUs);
  }
 
-  private void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
-    if (pendingFlushCount == 0) {
-      videoFrameRenderControl.render(positionUs, elapsedRealtimeUs);
-    }
-  }
-
  private void flush() {
    pendingFlushCount++;
    // Flush the render control now to ensure it has no data, eg calling isReady() must return false
@@ -522,6 +555,7 @@ public final class CompositingVideoSinkProvider
    @Nullable private Effect rotationEffect;
 
    @Nullable private Format inputFormat;
+    @InputType int inputType;
    private long inputStreamOffsetUs;
    private boolean pendingInputStreamOffsetChange;
 
@@ -586,11 +620,16 @@ public final class CompositingVideoSinkProvider
 
    @Override
    public void registerInputStream(@InputType int inputType, Format format) {
-      if (inputType != INPUT_TYPE_SURFACE) {
+      switch (inputType) {
+        case INPUT_TYPE_SURFACE:
+        case INPUT_TYPE_BITMAP:
+          break;
+        default:
          throw new UnsupportedOperationException("Unsupported input type " + inputType);
      }
      // MediaCodec applies rotation after API 21.
-      if (Util.SDK_INT < 21
+      if (inputType == INPUT_TYPE_SURFACE
+          && Util.SDK_INT < 21
          && format.rotationDegrees != Format.NO_VALUE
          && format.rotationDegrees != 0) {
        // We must apply a rotation effect.
@@ -603,6 +642,7 @@ public final class CompositingVideoSinkProvider
      } else {
        rotationEffect = null;
      }
+      this.inputType = inputType;
      this.inputFormat = format;
 
      if (!hasRegisteredFirstInputStream) {
@@ -678,8 +718,9 @@ public final class CompositingVideoSinkProvider
    }
 
    @Override
-    public boolean queueBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
-      throw new UnsupportedOperationException();
+    public boolean queueBitmap(Bitmap inputBitmap, TimestampIterator timestampIterator) {
+      return checkStateNotNull(videoFrameProcessor)
+          .queueInputBitmap(inputBitmap, timestampIterator);
    }
 
    @Override
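Note (not part of the diff): with INPUT_TYPE_BITMAP now accepted by registerInputStream and queueBitmap implemented, an image path can feed bitmaps into the composition preview. A hypothetical usage sketch; format, bitmap, and streamOffsetUs are assumed to come from the caller:

    // Hypothetical wiring: register a bitmap stream on the sink, then queue the decoded bitmap
    // with timestamps generated by the updated ConstantRateTimestampIterator.
    VideoSink videoSink = compositingVideoSinkProvider.getSink();
    videoSink.registerInputStream(VideoSink.INPUT_TYPE_BITMAP, format);
    boolean queued =
        videoSink.queueBitmap(
            bitmap,
            new ConstantRateTimestampIterator(
                /* durationUs= */ C.MICROS_PER_SECOND,
                /* frameRate= */ 30,
                /* startingTimestampUs= */ streamOffsetUs));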
@@ -714,7 +755,7 @@ public final class CompositingVideoSinkProvider
      this.videoEffects.addAll(videoEffects);
    }
 
-    /** Sets the stream offset, in micro seconds. */
+    /** Sets the stream offset, in microseconds. */
    public void setStreamOffsetUs(long streamOffsetUs) {
      pendingInputStreamOffsetChange = inputStreamOffsetUs != streamOffsetUs;
      inputStreamOffsetUs = streamOffsetUs;
@@ -732,7 +773,7 @@ public final class CompositingVideoSinkProvider
      effects.addAll(videoEffects);
      Format inputFormat = checkNotNull(this.inputFormat);
      videoFrameProcessor.registerInputStream(
-          VideoFrameProcessor.INPUT_TYPE_SURFACE,
+          inputType,
          effects,
          new FrameInfo.Builder(inputFormat.width, inputFormat.height)
              .setPixelWidthHeightRatio(inputFormat.pixelWidthHeightRatio)
@@ -34,7 +34,7 @@ import java.util.concurrent.Executor;
 
 /** A sink that consumes decoded video frames. */
 @UnstableApi
-/* package */ interface VideoSink {
+public interface VideoSink {
 
  /** Thrown by {@link VideoSink} implementations. */
  final class VideoSinkException extends Exception {
@@ -109,20 +109,6 @@ public final class CompositingVideoSinkProviderTest {
    assertThrows(IllegalStateException.class, () -> provider.initialize(format));
  }
 
-  @Test
-  public void registerInputStream_withInputTypeBitmap_throws() throws VideoSink.VideoSinkException {
-    CompositingVideoSinkProvider provider = createCompositingVideoSinkProvider();
-    provider.setVideoEffects(ImmutableList.of());
-    provider.initialize(new Format.Builder().build());
-    VideoSink videoSink = provider.getSink();
-
-    assertThrows(
-        UnsupportedOperationException.class,
-        () ->
-            videoSink.registerInputStream(
-                VideoSink.INPUT_TYPE_BITMAP, new Format.Builder().build()));
-  }
-
  @Test
  public void setOutputStreamOffsetUs_frameReleaseTimesAreAdjusted()
      throws VideoSink.VideoSinkException {