Remove queueInputBitmap by framerate and duration interfaces

PiperOrigin-RevId: 559384011
This commit is contained in:
tofunmi 2023-08-23 12:27:28 +01:00 committed by Ian Baker
parent 64c2406c1e
commit 404a259295
16 changed files with 168 additions and 227 deletions

View File

@ -13,6 +13,8 @@
object request (`nor`) and next range request (`nrr`) object request (`nor`) and next range request (`nrr`)
([#8699](https://github.com/google/ExoPlayer/issues/8699)). ([#8699](https://github.com/google/ExoPlayer/issues/8699)).
* Transformer: * Transformer:
* Changed `frameRate` and `durationUs` parameters of
`SampleConsumer.queueInputBitmap` to `TimestampIterator`.
* Track Selection: * Track Selection:
* Extractors: * Extractors:
* Audio: * Audio:
@ -22,6 +24,8 @@
* Metadata: * Metadata:
* DRM: * DRM:
* Effect: * Effect:
* Changed `frameRate` and `durationUs` parameters of
`VideoFrameProcessor.queueInputBitmap` to `TimestampIterator`.
* Muxers: * Muxers:
* IMA extension: * IMA extension:
* Session: * Session:

View File

@ -150,26 +150,6 @@ public interface VideoFrameProcessor {
/** Indicates the frame should be dropped after {@link #renderOutputFrame(long)} is invoked. */ /** Indicates the frame should be dropped after {@link #renderOutputFrame(long)} is invoked. */
long DROP_OUTPUT_FRAME = -2; long DROP_OUTPUT_FRAME = -2;
/**
* Provides an input {@link Bitmap} to the {@link VideoFrameProcessor} to generate an input stream
* of frames.
*
* <p>Each call must be made after {@linkplain #registerInputStream registering a new input
* stream}.
*
* <p>Can be called on any thread.
*
* @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
* @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
* second.
* @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
* {@linkplain #INPUT_TYPE_BITMAP bitmap input}.
*/
// TODO(b/262693274): Delete this method and usages in favor of the one below (Note it is not
// deprecated because transformer still relies on this method for frame duplication).
void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate);
/** /**
* Provides an input {@link Bitmap} to the {@link VideoFrameProcessor}. * Provides an input {@link Bitmap} to the {@link VideoFrameProcessor}.
* *

View File

@ -30,6 +30,8 @@ import androidx.media3.common.C;
@UnstableApi @UnstableApi
public final class ConstantRateTimestampIterator implements TimestampIterator { public final class ConstantRateTimestampIterator implements TimestampIterator {
private final long durationUs;
private final float frameRate;
private final double framesDurationUs; private final double framesDurationUs;
private double currentTimestampUs; private double currentTimestampUs;
private int framesToAdd; private int framesToAdd;
@ -45,6 +47,8 @@ public final class ConstantRateTimestampIterator implements TimestampIterator {
@FloatRange(from = 0, fromInclusive = false) float frameRate) { @FloatRange(from = 0, fromInclusive = false) float frameRate) {
checkArgument(durationUs > 0); checkArgument(durationUs > 0);
checkArgument(frameRate > 0); checkArgument(frameRate > 0);
this.durationUs = durationUs;
this.frameRate = frameRate;
framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND)); framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
framesDurationUs = C.MICROS_PER_SECOND / frameRate; framesDurationUs = C.MICROS_PER_SECOND / frameRate;
} }
@ -62,4 +66,9 @@ public final class ConstantRateTimestampIterator implements TimestampIterator {
currentTimestampUs += framesDurationUs; currentTimestampUs += framesDurationUs;
return next; return next;
} }
@Override
public ConstantRateTimestampIterator copyOf() {
return new ConstantRateTimestampIterator(durationUs, frameRate);
}
} }

View File

@ -15,8 +15,6 @@
*/ */
package androidx.media3.common.util; package androidx.media3.common.util;
import java.util.Iterator;
/** A primitive long iterator used for generating sequences of timestamps. */ /** A primitive long iterator used for generating sequences of timestamps. */
@UnstableApi @UnstableApi
public interface TimestampIterator { public interface TimestampIterator {
@ -27,18 +25,6 @@ public interface TimestampIterator {
/** Returns the next timestamp. */ /** Returns the next timestamp. */
long next(); long next();
/** Creates a TimestampIterator. */ /** Returns a fresh copy of the iterator. */
static TimestampIterator createFromLongIterator(Iterator<Long> iterator) { TimestampIterator copyOf();
return new TimestampIterator() {
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public long next() {
return iterator.next();
}
};
}
} }

View File

@ -16,7 +16,6 @@
package androidx.media3.effect; package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.TimestampIterator.createFromLongIterator;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap; import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
@ -24,9 +23,12 @@ import android.graphics.Bitmap;
import android.util.Pair; import android.util.Pair;
import androidx.media3.common.C; import androidx.media3.common.C;
import androidx.media3.common.ColorInfo; import androidx.media3.common.ColorInfo;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner; import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import java.util.Iterator;
import java.util.List;
import java.util.Queue; import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
@ -210,14 +212,36 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
videoFrameProcessorTestRunner.queueInputBitmaps( videoFrameProcessorTestRunner.queueInputBitmaps(
bitmap1.getWidth(), bitmap1.getWidth(),
bitmap1.getHeight(), bitmap1.getHeight(),
Pair.create(bitmap1, createFromLongIterator(ImmutableList.of(offset1).iterator())), Pair.create(bitmap1, createTimestampIterator(ImmutableList.of(offset1))),
Pair.create( Pair.create(bitmap2, createTimestampIterator(ImmutableList.of(offset2, offset3))));
bitmap2, createFromLongIterator(ImmutableList.of(offset2, offset3).iterator())));
videoFrameProcessorTestRunner.endFrameProcessing(); videoFrameProcessorTestRunner.endFrameProcessing();
assertThat(actualPresentationTimesUs).containsExactly(offset1, offset2, offset3).inOrder(); assertThat(actualPresentationTimesUs).containsExactly(offset1, offset2, offset3).inOrder();
} }
private static TimestampIterator createTimestampIterator(List<Long> elements) {
Iterator<Long> elementsIterator = elements.iterator();
return new TimestampIterator() {
@Override
public boolean hasNext() {
return elementsIterator.hasNext();
}
@Override
public long next() {
return elementsIterator.next();
}
@Override
public TimestampIterator copyOf() {
// Method not needed for effects tests.
throw new UnsupportedOperationException();
}
};
}
private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder( private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
String testId) { String testId) {
return new VideoFrameProcessorTestRunner.Builder() return new VideoFrameProcessorTestRunner.Builder()

View File

@ -18,7 +18,6 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument; import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull; import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState; import static androidx.media3.common.util.Assertions.checkState;
import static java.lang.Math.round;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.opengl.GLES20; import android.opengl.GLES20;
@ -30,6 +29,7 @@ import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo; import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import java.util.Queue; import java.util.Queue;
@ -57,10 +57,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private @MonotonicNonNull GlTextureInfo currentGlTextureInfo; private @MonotonicNonNull GlTextureInfo currentGlTextureInfo;
private int downstreamShaderProgramCapacity; private int downstreamShaderProgramCapacity;
private int framesToQueueForCurrentBitmap;
private double currentPresentationTimeUs;
private boolean useHdr; private boolean useHdr;
private boolean currentInputStreamEnded; private boolean currentInputStreamEnded;
private boolean isNextFrameInTexture;
/** /**
* Creates a new instance. * Creates a new instance.
@ -92,10 +91,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override @Override
public void queueInputBitmap( public void queueInputBitmap(
Bitmap inputBitmap, long durationUs, FrameInfo frameInfo, float frameRate, boolean useHdr) { Bitmap inputBitmap,
FrameInfo frameInfo,
TimestampIterator inStreamOffsetsUs,
boolean useHdr) {
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
setupBitmap(inputBitmap, durationUs, frameInfo, frameRate, useHdr); setupBitmap(inputBitmap, frameInfo, inStreamOffsetsUs, useHdr);
currentInputStreamEnded = false; currentInputStreamEnded = false;
}); });
} }
@ -110,7 +112,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
public void signalEndOfCurrentInputStream() { public void signalEndOfCurrentInputStream() {
videoFrameProcessingTaskExecutor.submit( videoFrameProcessingTaskExecutor.submit(
() -> { () -> {
if (framesToQueueForCurrentBitmap == 0 && pendingBitmaps.isEmpty()) { if (pendingBitmaps.isEmpty()) {
shaderProgram.signalEndOfCurrentInputStream(); shaderProgram.signalEndOfCurrentInputStream();
DebugTraceUtil.logEvent( DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE); DebugTraceUtil.EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
@ -137,7 +139,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
// Methods that must be called on the GL thread. // Methods that must be called on the GL thread.
private void setupBitmap( private void setupBitmap(
Bitmap bitmap, long durationUs, FrameInfo frameInfo, float frameRate, boolean useHdr) Bitmap bitmap, FrameInfo frameInfo, TimestampIterator inStreamOffsetsUs, boolean useHdr)
throws VideoFrameProcessingException { throws VideoFrameProcessingException {
if (Util.SDK_INT >= 26) { if (Util.SDK_INT >= 26) {
checkState( checkState(
@ -147,17 +149,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
checkState( checkState(
!bitmap.getConfig().equals(Bitmap.Config.RGBA_1010102), UNSUPPORTED_IMAGE_CONFIGURATION); !bitmap.getConfig().equals(Bitmap.Config.RGBA_1010102), UNSUPPORTED_IMAGE_CONFIGURATION);
} }
this.useHdr = useHdr; this.useHdr = useHdr;
// TODO(b/262693274): move frame duplication logic out of the texture manager. Note this will checkArgument(inStreamOffsetsUs.hasNext(), "Bitmap queued but no timestamps provided.");
// involve removing the BitmapFrameSequenceInfo queue and using the FrameConsumptionManager pendingBitmaps.add(new BitmapFrameSequenceInfo(bitmap, frameInfo, inStreamOffsetsUs));
// instead. It will also remove the framesToAdd variable
int framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
// framesToAdd > 0, otherwise the VFP will hang.
checkArgument(framesToAdd > 0);
double frameDurationUs = C.MICROS_PER_SECOND / frameRate;
pendingBitmaps.add(
new BitmapFrameSequenceInfo(bitmap, frameInfo, frameDurationUs, framesToAdd));
maybeQueueToShaderProgram(); maybeQueueToShaderProgram();
} }
@ -167,51 +161,29 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
BitmapFrameSequenceInfo currentBitmapInfo = checkNotNull(pendingBitmaps.peek()); BitmapFrameSequenceInfo currentBitmapInfo = checkNotNull(pendingBitmaps.peek());
if (framesToQueueForCurrentBitmap == 0) { FrameInfo currentFrameInfo = currentBitmapInfo.frameInfo;
Bitmap bitmap = currentBitmapInfo.bitmap; TimestampIterator inStreamOffsetsUs = currentBitmapInfo.inStreamOffsetsUs;
framesToQueueForCurrentBitmap = currentBitmapInfo.numberOfFrames; checkState(currentBitmapInfo.inStreamOffsetsUs.hasNext());
currentPresentationTimeUs = currentBitmapInfo.frameInfo.offsetToAddUs; long currentPresentationTimeUs =
int currentTexId; currentBitmapInfo.frameInfo.offsetToAddUs + inStreamOffsetsUs.next();
try { if (!isNextFrameInTexture) {
if (currentGlTextureInfo != null) { isNextFrameInTexture = true;
currentGlTextureInfo.release(); updateCurrentGlTextureInfo(currentFrameInfo, currentBitmapInfo.bitmap);
}
currentTexId =
GlUtil.createTexture(
currentBitmapInfo.frameInfo.width,
currentBitmapInfo.frameInfo.height,
/* useHighPrecisionColorComponents= */ useHdr);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, currentTexId);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw VideoFrameProcessingException.from(e);
}
currentGlTextureInfo =
new GlTextureInfo(
currentTexId,
/* fboId= */ C.INDEX_UNSET,
/* rboId= */ C.INDEX_UNSET,
currentBitmapInfo.frameInfo.width,
currentBitmapInfo.frameInfo.height);
} }
framesToQueueForCurrentBitmap--;
downstreamShaderProgramCapacity--; downstreamShaderProgramCapacity--;
shaderProgram.queueInputFrame( shaderProgram.queueInputFrame(
glObjectsProvider, checkNotNull(currentGlTextureInfo), round(currentPresentationTimeUs)); glObjectsProvider, checkNotNull(currentGlTextureInfo), currentPresentationTimeUs);
DebugTraceUtil.logEvent( DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_QUEUE_BITMAP, DebugTraceUtil.EVENT_VFP_QUEUE_BITMAP,
(long) currentPresentationTimeUs, currentPresentationTimeUs,
/* extra= */ currentBitmapInfo.frameInfo.width + "x" + currentBitmapInfo.frameInfo.height); /* extra= */ currentFrameInfo.width + "x" + currentFrameInfo.height);
currentPresentationTimeUs += currentBitmapInfo.frameDurationUs;
if (framesToQueueForCurrentBitmap == 0) { if (!currentBitmapInfo.inStreamOffsetsUs.hasNext()) {
isNextFrameInTexture = false;
pendingBitmaps.remove(); pendingBitmaps.remove();
if (pendingBitmaps.isEmpty() && currentInputStreamEnded) { if (pendingBitmaps.isEmpty() && currentInputStreamEnded) {
// Only signal end of stream after all pending bitmaps are processed. // Only signal end of stream after all pending bitmaps are processed.
// TODO(b/269424561): Call signalEndOfCurrentInputStream on every bitmap
shaderProgram.signalEndOfCurrentInputStream(); shaderProgram.signalEndOfCurrentInputStream();
DebugTraceUtil.logEvent( DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE); DebugTraceUtil.EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
@ -220,19 +192,42 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
} }
/** Information to generate all the frames associated with a specific {@link Bitmap}. */ /** Information needed to generate all the frames associated with a specific {@link Bitmap}. */
private static final class BitmapFrameSequenceInfo { private static final class BitmapFrameSequenceInfo {
public final Bitmap bitmap; public final Bitmap bitmap;
public final FrameInfo frameInfo; private final FrameInfo frameInfo;
public final double frameDurationUs; private final TimestampIterator inStreamOffsetsUs;
public final int numberOfFrames;
public BitmapFrameSequenceInfo( public BitmapFrameSequenceInfo(
Bitmap bitmap, FrameInfo frameInfo, double frameDurationUs, int numberOfFrames) { Bitmap bitmap, FrameInfo frameInfo, TimestampIterator inStreamOffsetsUs) {
this.bitmap = bitmap; this.bitmap = bitmap;
this.frameInfo = frameInfo; this.frameInfo = frameInfo;
this.frameDurationUs = frameDurationUs; this.inStreamOffsetsUs = inStreamOffsetsUs;
this.numberOfFrames = numberOfFrames;
} }
} }
private void updateCurrentGlTextureInfo(FrameInfo frameInfo, Bitmap bitmap)
throws VideoFrameProcessingException {
int currentTexId;
try {
if (currentGlTextureInfo != null) {
currentGlTextureInfo.release();
}
currentTexId =
GlUtil.createTexture(
frameInfo.width, frameInfo.height, /* useHighPrecisionColorComponents= */ useHdr);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, currentTexId);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
GlUtil.checkGlError();
} catch (GlUtil.GlException e) {
throw VideoFrameProcessingException.from(e);
}
currentGlTextureInfo =
new GlTextureInfo(
currentTexId,
/* fboId= */ C.INDEX_UNSET,
/* rboId= */ C.INDEX_UNSET,
frameInfo.width,
frameInfo.height);
}
} }

View File

@ -349,7 +349,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
private volatile @MonotonicNonNull CountDownLatch latch; private volatile @MonotonicNonNull CountDownLatch latch;
private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo; private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
private volatile boolean inputStreamEnded; private volatile boolean inputStreamEnded;
private volatile boolean hasRefreshedNextInputFrameInfo;
private DefaultVideoFrameProcessor( private DefaultVideoFrameProcessor(
Context context, Context context,
@ -422,39 +421,15 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
} }
@Override @Override
public void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate) { public void queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
checkState( FrameInfo frameInfo = checkNotNull(this.nextInputFrameInfo);
hasRefreshedNextInputFrameInfo,
"registerInputStream must be called before queueing another bitmap");
inputSwitcher inputSwitcher
.activeTextureManager() .activeTextureManager()
.queueInputBitmap( .queueInputBitmap(
inputBitmap, inputBitmap,
durationUs, new FrameInfo.Builder(frameInfo).setOffsetToAddUs(frameInfo.offsetToAddUs).build(),
checkNotNull(nextInputFrameInfo), inStreamOffsetsUs,
frameRate,
/* useHdr= */ false); /* useHdr= */ false);
hasRefreshedNextInputFrameInfo = false;
}
@Override
public void queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
FrameInfo frameInfo = checkNotNull(this.nextInputFrameInfo);
// TODO(b/262693274): move frame duplication logic out of the texture manager so
// textureManager.queueInputBitmap() frame rate and duration parameters be removed.
while (inStreamOffsetsUs.hasNext()) {
long inStreamOffsetUs = inStreamOffsetsUs.next();
inputSwitcher
.activeTextureManager()
.queueInputBitmap(
inputBitmap,
/* durationUs= */ C.MICROS_PER_SECOND,
new FrameInfo.Builder(frameInfo)
.setOffsetToAddUs(frameInfo.offsetToAddUs + inStreamOffsetUs)
.build(),
/* frameRate= */ 1,
/* useHdr= */ false);
}
} }
@Override @Override
@ -482,7 +457,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
"InputType %s - %dx%d", "InputType %s - %dx%d",
getInputTypeString(inputType), frameInfo.width, frameInfo.height)); getInputTypeString(inputType), frameInfo.width, frameInfo.height));
nextInputFrameInfo = adjustForPixelWidthHeightRatio(frameInfo); nextInputFrameInfo = adjustForPixelWidthHeightRatio(frameInfo);
hasRefreshedNextInputFrameInfo = true;
synchronized (lock) { synchronized (lock) {
if (!processingInput) { if (!processingInput) {
videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects)); videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects));
@ -524,7 +498,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
nextInputFrameInfo, "registerInputStream must be called before registering input frames"); nextInputFrameInfo, "registerInputStream must be called before registering input frames");
inputSwitcher.activeTextureManager().registerInputFrame(nextInputFrameInfo); inputSwitcher.activeTextureManager().registerInputFrame(nextInputFrameInfo);
hasRefreshedNextInputFrameInfo = false;
} }
@Override @Override

View File

@ -25,6 +25,7 @@ import androidx.media3.common.FrameInfo;
import androidx.media3.common.OnInputFrameProcessedListener; import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.TimestampIterator;
/** Handles {@code DefaultVideoFrameProcessor}'s input. */ /** Handles {@code DefaultVideoFrameProcessor}'s input. */
/* package */ interface TextureManager extends GlShaderProgram.InputListener { /* package */ interface TextureManager extends GlShaderProgram.InputListener {
@ -42,13 +43,16 @@ import androidx.media3.common.VideoFrameProcessor;
* Provides an input {@link Bitmap} to put into the video frames. * Provides an input {@link Bitmap} to put into the video frames.
* *
* @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}. * @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
* @param durationUs The duration of the bitmap in the composition, in microseconds.
* @param frameInfo Information about the bitmap being queued. * @param frameInfo Information about the bitmap being queued.
* @param frameRate The rate at which to generate frames with the bitmap, in frames per second. * @param inStreamOffsetsUs The times within the current stream that the bitmap should be shown
* at. The timestamps should be monotonically increasing.
* @param useHdr Whether input and/or output colors are HDR. * @param useHdr Whether input and/or output colors are HDR.
*/ */
default void queueInputBitmap( default void queueInputBitmap(
Bitmap inputBitmap, long durationUs, FrameInfo frameInfo, float frameRate, boolean useHdr) { Bitmap inputBitmap,
FrameInfo frameInfo,
TimestampIterator inStreamOffsetsUs,
boolean useHdr) {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }

View File

@ -44,6 +44,7 @@ import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.SurfaceInfo; import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException; import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor; import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.ConstantRateTimestampIterator;
import androidx.media3.common.util.GlUtil; import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.TimestampIterator; import androidx.media3.common.util.TimestampIterator;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
@ -254,8 +255,7 @@ public final class VideoFrameProcessorTestRunner {
private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException; private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException;
private final VideoFrameProcessor videoFrameProcessor; private final VideoFrameProcessor videoFrameProcessor;
private final ImmutableList<Effect> effects; private final ImmutableList<Effect> effects;
private final @MonotonicNonNull BitmapReader bitmapReader;
private @MonotonicNonNull BitmapReader bitmapReader;
private VideoFrameProcessorTestRunner( private VideoFrameProcessorTestRunner(
String testId, String testId,
@ -356,7 +356,8 @@ public final class VideoFrameProcessorTestRunner {
.setPixelWidthHeightRatio(pixelWidthHeightRatio) .setPixelWidthHeightRatio(pixelWidthHeightRatio)
.setOffsetToAddUs(offsetToAddUs) .setOffsetToAddUs(offsetToAddUs)
.build()); .build());
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate); videoFrameProcessor.queueInputBitmap(
inputBitmap, new ConstantRateTimestampIterator(durationUs, frameRate));
} }
public void queueInputBitmaps(int width, int height, Pair<Bitmap, TimestampIterator>... frames) { public void queueInputBitmaps(int width, int height, Pair<Bitmap, TimestampIterator>... frames) {

View File

@ -622,8 +622,8 @@ public class TransformerEndToEndTest {
assertThat(result.exportResult.processedInputs).hasSize(7); assertThat(result.exportResult.processedInputs).hasSize(7);
assertThat(result.exportResult.channelCount).isEqualTo(1); assertThat(result.exportResult.channelCount).isEqualTo(1);
assertThat(result.exportResult.videoFrameCount).isEqualTo(94); assertThat(result.exportResult.durationMs).isEqualTo(3133);
assertThat(result.exportResult.durationMs).isEqualTo(3100); assertThat(result.exportResult.videoFrameCount).isEqualTo(95);
} }
@Test @Test

View File

@ -216,7 +216,7 @@ public final class EditedMediaItem {
public final long durationUs; public final long durationUs;
/** The frame rate of the image in the output video, in frames per second. */ /** The frame rate of the image in the output video, in frames per second. */
@IntRange(from = 0) @IntRange(from = 1)
public final int frameRate; public final int frameRate;
/** The {@link Effects} to apply to the {@link #mediaItem}. */ /** The {@link Effects} to apply to the {@link #mediaItem}. */

View File

@ -36,6 +36,7 @@ import androidx.media3.common.Format;
import androidx.media3.common.MediaItem; import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes; import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.BitmapLoader; import androidx.media3.common.util.BitmapLoader;
import androidx.media3.common.util.ConstantRateTimestampIterator;
import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util; import androidx.media3.common.util.Util;
import androidx.media3.datasource.DataSource; import androidx.media3.datasource.DataSource;
@ -178,7 +179,9 @@ public final class ImageAssetLoader implements AssetLoader {
// callback rather than setting duration here. // callback rather than setting duration here.
if (sampleConsumer == null if (sampleConsumer == null
|| !sampleConsumer.queueInputBitmap( || !sampleConsumer.queueInputBitmap(
bitmap, editedMediaItem.durationUs, editedMediaItem.frameRate)) { bitmap,
new ConstantRateTimestampIterator(
editedMediaItem.durationUs, editedMediaItem.frameRate))) {
scheduledExecutorService.schedule( scheduledExecutorService.schedule(
() -> queueBitmapInternal(bitmap, format), QUEUE_BITMAP_INTERVAL_MS, MILLISECONDS); () -> queueBitmapInternal(bitmap, format), QUEUE_BITMAP_INTERVAL_MS, MILLISECONDS);
return; return;

View File

@ -66,24 +66,6 @@ public interface SampleConsumer {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
/**
* Attempts to provide an input {@link Bitmap} to the consumer.
*
* <p>Should only be used for image data.
*
* @param inputBitmap The {@link Bitmap} to queue to the consumer.
* @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
* @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
* second.
* @return Whether the {@link Bitmap} was successfully queued. If {@code false}, the caller should
* try again later.
*/
// TODO(b/262693274): Delete this method and usages in favor of the one below (Note it is not
// deprecated because transformer still relies on this method for frame duplication).
default boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
throw new UnsupportedOperationException();
}
/** /**
* Attempts to provide an input {@link Bitmap} to the consumer. * Attempts to provide an input {@link Bitmap} to the consumer.
* *

View File

@ -348,37 +348,6 @@ import java.util.concurrent.atomic.AtomicInteger;
sequenceAssetLoaderListener.onError(exportException); sequenceAssetLoaderListener.onError(exportException);
} }
/**
* Wraps a {@link TimestampIterator}, providing all the values in the original timestamp iterator
* (in the same order) up to and including the first occurrence of the {@code clippingValue}.
*/
private static final class ClippingIterator implements TimestampIterator {
private final TimestampIterator iterator;
private final long clippingValue;
private boolean hasReachedClippingValue;
public ClippingIterator(TimestampIterator iterator, long clippingValue) {
this.iterator = iterator;
this.clippingValue = clippingValue;
}
@Override
public boolean hasNext() {
return !hasReachedClippingValue && iterator.hasNext();
}
@Override
public long next() {
checkState(hasNext());
long next = iterator.next();
if (clippingValue == next) {
hasReachedClippingValue = true;
}
return next;
}
}
// Classes accessed from AssetLoader threads. // Classes accessed from AssetLoader threads.
private final class SampleConsumerWrapper implements SampleConsumer { private final class SampleConsumerWrapper implements SampleConsumer {
@ -428,51 +397,31 @@ import java.util.concurrent.atomic.AtomicInteger;
return true; return true;
} }
@Override
public boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
if (isLooping && totalDurationUs + durationUs > maxSequenceDurationUs) {
if (!isMaxSequenceDurationUsFinal) {
return false;
}
durationUs = maxSequenceDurationUs - totalDurationUs;
if (durationUs == 0) {
if (!videoLoopingEnded) {
videoLoopingEnded = true;
signalEndOfVideoInput();
}
return false;
}
videoLoopingEnded = true;
}
return sampleConsumer.queueInputBitmap(inputBitmap, durationUs, frameRate);
}
@Override @Override
public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) { public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
TimestampIterator iteratorToUse = inStreamOffsetsUs;
if (isLooping) { if (isLooping) {
long durationLeftUs = maxSequenceDurationUs - totalDurationUs; long lastOffsetUs = C.TIME_UNSET;
if (durationLeftUs <= 0) {
if (!videoLoopingEnded) {
videoLoopingEnded = true;
signalEndOfVideoInput();
}
return false;
}
while (inStreamOffsetsUs.hasNext()) { while (inStreamOffsetsUs.hasNext()) {
long offsetUs = inStreamOffsetsUs.next(); long offsetUs = inStreamOffsetsUs.next();
if (totalDurationUs + offsetUs > maxSequenceDurationUs) { if (totalDurationUs + offsetUs > maxSequenceDurationUs) {
if (!isMaxSequenceDurationUsFinal) { if (!isMaxSequenceDurationUsFinal) {
return false; return false;
} }
iteratorToUse = new ClippingIterator(inStreamOffsetsUs, offsetUs); if (lastOffsetUs == C.TIME_UNSET) {
if (!videoLoopingEnded) {
videoLoopingEnded = true;
signalEndOfVideoInput();
}
return false;
}
inStreamOffsetsUs = new ClippingIterator(inStreamOffsetsUs.copyOf(), lastOffsetUs);
videoLoopingEnded = true; videoLoopingEnded = true;
break; break;
} }
lastOffsetUs = offsetUs;
} }
} }
return sampleConsumer.queueInputBitmap(inputBitmap, iteratorToUse); return sampleConsumer.queueInputBitmap(inputBitmap, inStreamOffsetsUs.copyOf());
} }
@Override @Override
@ -564,4 +513,40 @@ import java.util.concurrent.atomic.AtomicInteger;
}); });
} }
} }
/**
* Wraps a {@link TimestampIterator}, providing all the values in the original timestamp iterator
* (in the same order) up to and including the first occurrence of the {@code clippingValue}.
*/
private static final class ClippingIterator implements TimestampIterator {
private final TimestampIterator iterator;
private final long clippingValue;
private boolean hasReachedClippingValue;
public ClippingIterator(TimestampIterator iterator, long clippingValue) {
this.iterator = iterator;
this.clippingValue = clippingValue;
}
@Override
public boolean hasNext() {
return !hasReachedClippingValue && iterator.hasNext();
}
@Override
public long next() {
checkState(hasNext());
long next = iterator.next();
if (clippingValue <= next) {
hasReachedClippingValue = true;
}
return next;
}
@Override
public TimestampIterator copyOf() {
return new ClippingIterator(iterator.copyOf(), clippingValue);
}
}
} }

View File

@ -168,12 +168,6 @@ import java.util.concurrent.atomic.AtomicLong;
mediaItemOffsetUs.addAndGet(durationUs); mediaItemOffsetUs.addAndGet(durationUs);
} }
@Override
public boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
return true;
}
@Override @Override
public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) { public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
videoFrameProcessor.queueInputBitmap(inputBitmap, inStreamOffsetsUs); videoFrameProcessor.queueInputBitmap(inputBitmap, inStreamOffsetsUs);

View File

@ -22,6 +22,7 @@ import android.graphics.Bitmap;
import android.os.Looper; import android.os.Looper;
import androidx.media3.common.Format; import androidx.media3.common.Format;
import androidx.media3.common.MediaItem; import androidx.media3.common.MediaItem;
import androidx.media3.common.util.TimestampIterator;
import androidx.test.core.app.ApplicationProvider; import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.time.Duration; import java.time.Duration;
@ -126,7 +127,7 @@ public class ImageAssetLoaderTest {
private static final class FakeSampleConsumer implements SampleConsumer { private static final class FakeSampleConsumer implements SampleConsumer {
@Override @Override
public boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) { public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
return true; return true;
} }