Remove queueInputBitmap interfaces that take frame rate and duration
PiperOrigin-RevId: 559384011
parent 64c2406c1e
commit 404a259295
@@ -13,6 +13,8 @@
    object request (`nor`) and next range request (`nrr`)
    ([#8699](https://github.com/google/ExoPlayer/issues/8699)).
* Transformer:
  * Changed `frameRate` and `durationUs` parameters of
    `SampleConsumer.queueInputBitmap` to `TimestampIterator`.
* Track Selection:
* Extractors:
* Audio:
@@ -22,6 +24,8 @@
* Metadata:
* DRM:
* Effect:
  * Changed `frameRate` and `durationUs` parameters of
    `VideoFrameProcessor.queueInputBitmap` to `TimestampIterator`.
* Muxers:
* IMA extension:
* Session:
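For callers of the removed overloads, the old even-frame-spacing behaviour is preserved by wrapping the previous arguments in a `ConstantRateTimestampIterator`, which is how this commit updates its own call sites. A minimal migration sketch (bitmap, duration and frame rate values are illustrative):

    // Before: duration and frame rate passed directly (overload removed by this change).
    videoFrameProcessor.queueInputBitmap(bitmap, /* durationUs= */ 2_000_000, /* frameRate= */ 30);

    // After: timestamps are supplied through a TimestampIterator; ConstantRateTimestampIterator
    // generates the same evenly spaced timestamps as the removed overload.
    videoFrameProcessor.queueInputBitmap(
        bitmap, new ConstantRateTimestampIterator(/* durationUs= */ 2_000_000, /* frameRate= */ 30));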
@@ -150,26 +150,6 @@ public interface VideoFrameProcessor {
  /** Indicates the frame should be dropped after {@link #renderOutputFrame(long)} is invoked. */
  long DROP_OUTPUT_FRAME = -2;

  /**
   * Provides an input {@link Bitmap} to the {@link VideoFrameProcessor} to generate an input stream
   * of frames.
   *
   * <p>Each call must be made after {@linkplain #registerInputStream registering a new input
   * stream}.
   *
   * <p>Can be called on any thread.
   *
   * @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
   * @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
   * @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
   *     second.
   * @throws UnsupportedOperationException If the {@code VideoFrameProcessor} does not accept
   *     {@linkplain #INPUT_TYPE_BITMAP bitmap input}.
   */
  // TODO(b/262693274): Delete this method and usages in favor of the one below (Note it is not
  // deprecated because transformer still relies on this method for frame duplication).
  void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate);

  /**
   * Provides an input {@link Bitmap} to the {@link VideoFrameProcessor}.
   *
@@ -30,6 +30,8 @@ import androidx.media3.common.C;
@UnstableApi
public final class ConstantRateTimestampIterator implements TimestampIterator {

  private final long durationUs;
  private final float frameRate;
  private final double framesDurationUs;
  private double currentTimestampUs;
  private int framesToAdd;
@@ -45,6 +47,8 @@ public final class ConstantRateTimestampIterator implements TimestampIterator {
      @FloatRange(from = 0, fromInclusive = false) float frameRate) {
    checkArgument(durationUs > 0);
    checkArgument(frameRate > 0);
    this.durationUs = durationUs;
    this.frameRate = frameRate;
    framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
    framesDurationUs = C.MICROS_PER_SECOND / frameRate;
  }
@@ -62,4 +66,9 @@ public final class ConstantRateTimestampIterator implements TimestampIterator {
    currentTimestampUs += framesDurationUs;
    return next;
  }

  @Override
  public ConstantRateTimestampIterator copyOf() {
    return new ConstantRateTimestampIterator(durationUs, frameRate);
  }
}
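From the constructor above, the iterator emits round(frameRate * durationUs / 1,000,000) timestamps spaced 1,000,000 / frameRate microseconds apart, and `copyOf()` restarts the sequence from zero. A small worked example (values illustrative, timestamps rounded to whole microseconds):

    TimestampIterator timestamps =
        new ConstantRateTimestampIterator(/* durationUs= */ 1_000_000, /* frameRate= */ 5);
    while (timestamps.hasNext()) {
      long timestampUs = timestamps.next(); // 0, 200_000, 400_000, 600_000, 800_000
    }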
@@ -15,8 +15,6 @@
 */
package androidx.media3.common.util;

import java.util.Iterator;

/** A primitive long iterator used for generating sequences of timestamps. */
@UnstableApi
public interface TimestampIterator {
@@ -27,18 +25,6 @@ public interface TimestampIterator {
  /** Returns the next timestamp. */
  long next();

  /** Creates TimestampIterator */
  static TimestampIterator createFromLongIterator(Iterator<Long> iterator) {
    return new TimestampIterator() {
      @Override
      public boolean hasNext() {
        return iterator.hasNext();
      }

      @Override
      public long next() {
        return iterator.next();
      }
    };
  }
  /** Returns fresh copy of the iterator. */
  TimestampIterator copyOf();
}
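With the static `createFromLongIterator` factory removed, call sites that previously wrapped a plain `Iterator<Long>` now implement the interface directly, including the new `copyOf()` method (the test change below does exactly this). A minimal sketch, assuming a hypothetical list-backed helper that is not part of this commit:

    // Hypothetical helper: a TimestampIterator backed by a list, whose copyOf() re-reads the
    // list from the beginning.
    static TimestampIterator fromList(List<Long> timestampsUs) {
      Iterator<Long> iterator = timestampsUs.iterator();
      return new TimestampIterator() {
        @Override
        public boolean hasNext() {
          return iterator.hasNext();
        }

        @Override
        public long next() {
          return iterator.next();
        }

        @Override
        public TimestampIterator copyOf() {
          return fromList(timestampsUs);
        }
      };
    }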
@@ -16,7 +16,6 @@
package androidx.media3.effect;

import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.TimestampIterator.createFromLongIterator;
import static androidx.media3.test.utils.BitmapPixelTestUtil.readBitmap;
import static com.google.common.truth.Truth.assertThat;

@@ -24,9 +23,12 @@ import android.graphics.Bitmap;
import android.util.Pair;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.test.utils.VideoFrameProcessorTestRunner;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
@@ -210,14 +212,36 @@ public class DefaultVideoFrameProcessorImageFrameOutputTest {
    videoFrameProcessorTestRunner.queueInputBitmaps(
        bitmap1.getWidth(),
        bitmap1.getHeight(),
        Pair.create(bitmap1, createFromLongIterator(ImmutableList.of(offset1).iterator())),
        Pair.create(
            bitmap2, createFromLongIterator(ImmutableList.of(offset2, offset3).iterator())));
        Pair.create(bitmap1, createTimestampIterator(ImmutableList.of(offset1))),
        Pair.create(bitmap2, createTimestampIterator(ImmutableList.of(offset2, offset3))));
    videoFrameProcessorTestRunner.endFrameProcessing();

    assertThat(actualPresentationTimesUs).containsExactly(offset1, offset2, offset3).inOrder();
  }

  private static TimestampIterator createTimestampIterator(List<Long> elements) {

    Iterator<Long> elementsIterator = elements.iterator();

    return new TimestampIterator() {
      @Override
      public boolean hasNext() {
        return elementsIterator.hasNext();
      }

      @Override
      public long next() {
        return elementsIterator.next();
      }

      @Override
      public TimestampIterator copyOf() {
        // Method not needed for effects tests.
        throw new UnsupportedOperationException();
      }
    };
  }

  private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder(
      String testId) {
    return new VideoFrameProcessorTestRunner.Builder()
@@ -18,7 +18,6 @@ package androidx.media3.effect;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static java.lang.Math.round;

import android.graphics.Bitmap;
import android.opengl.GLES20;
@@ -30,6 +29,7 @@ import androidx.media3.common.GlObjectsProvider;
import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import java.util.Queue;
@@ -57,10 +57,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

  private @MonotonicNonNull GlTextureInfo currentGlTextureInfo;
  private int downstreamShaderProgramCapacity;
  private int framesToQueueForCurrentBitmap;
  private double currentPresentationTimeUs;
  private boolean useHdr;
  private boolean currentInputStreamEnded;
  private boolean isNextFrameInTexture;

  /**
   * Creates a new instance.
@@ -92,10 +91,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

  @Override
  public void queueInputBitmap(
      Bitmap inputBitmap, long durationUs, FrameInfo frameInfo, float frameRate, boolean useHdr) {
      Bitmap inputBitmap,
      FrameInfo frameInfo,
      TimestampIterator inStreamOffsetsUs,
      boolean useHdr) {
    videoFrameProcessingTaskExecutor.submit(
        () -> {
          setupBitmap(inputBitmap, durationUs, frameInfo, frameRate, useHdr);
          setupBitmap(inputBitmap, frameInfo, inStreamOffsetsUs, useHdr);
          currentInputStreamEnded = false;
        });
  }
@@ -110,7 +112,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
  public void signalEndOfCurrentInputStream() {
    videoFrameProcessingTaskExecutor.submit(
        () -> {
          if (framesToQueueForCurrentBitmap == 0 && pendingBitmaps.isEmpty()) {
          if (pendingBitmaps.isEmpty()) {
            shaderProgram.signalEndOfCurrentInputStream();
            DebugTraceUtil.logEvent(
                DebugTraceUtil.EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
@@ -137,7 +139,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

  // Methods that must be called on the GL thread.
  private void setupBitmap(
      Bitmap bitmap, long durationUs, FrameInfo frameInfo, float frameRate, boolean useHdr)
      Bitmap bitmap, FrameInfo frameInfo, TimestampIterator inStreamOffsetsUs, boolean useHdr)
      throws VideoFrameProcessingException {
    if (Util.SDK_INT >= 26) {
      checkState(
@@ -147,17 +149,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
      checkState(
          !bitmap.getConfig().equals(Bitmap.Config.RGBA_1010102), UNSUPPORTED_IMAGE_CONFIGURATION);
    }

    this.useHdr = useHdr;
    // TODO(b/262693274): move frame duplication logic out of the texture manager. Note this will
    // involve removing the BitmapFrameSequenceInfo queue and using the FrameConsumptionManager
    // instead. It will also remove the framesToAdd variable
    int framesToAdd = round(frameRate * (durationUs / (float) C.MICROS_PER_SECOND));
    // framestoAdd > 0 otherwise the VFP will hang.
    checkArgument(framesToAdd > 0);
    double frameDurationUs = C.MICROS_PER_SECOND / frameRate;
    pendingBitmaps.add(
        new BitmapFrameSequenceInfo(bitmap, frameInfo, frameDurationUs, framesToAdd));
    checkArgument(inStreamOffsetsUs.hasNext(), "Bitmap queued but no timestamps provided.");
    pendingBitmaps.add(new BitmapFrameSequenceInfo(bitmap, frameInfo, inStreamOffsetsUs));
    maybeQueueToShaderProgram();
  }

@@ -167,51 +161,29 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    }

    BitmapFrameSequenceInfo currentBitmapInfo = checkNotNull(pendingBitmaps.peek());
    if (framesToQueueForCurrentBitmap == 0) {
      Bitmap bitmap = currentBitmapInfo.bitmap;
      framesToQueueForCurrentBitmap = currentBitmapInfo.numberOfFrames;
      currentPresentationTimeUs = currentBitmapInfo.frameInfo.offsetToAddUs;
      int currentTexId;
      try {
        if (currentGlTextureInfo != null) {
          currentGlTextureInfo.release();
        }
        currentTexId =
            GlUtil.createTexture(
                currentBitmapInfo.frameInfo.width,
                currentBitmapInfo.frameInfo.height,
                /* useHighPrecisionColorComponents= */ useHdr);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, currentTexId);
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
        GlUtil.checkGlError();
      } catch (GlUtil.GlException e) {
        throw VideoFrameProcessingException.from(e);
    FrameInfo currentFrameInfo = currentBitmapInfo.frameInfo;
    TimestampIterator inStreamOffsetsUs = currentBitmapInfo.inStreamOffsetsUs;
    checkState(currentBitmapInfo.inStreamOffsetsUs.hasNext());
    long currentPresentationTimeUs =
        currentBitmapInfo.frameInfo.offsetToAddUs + inStreamOffsetsUs.next();
    if (!isNextFrameInTexture) {
      isNextFrameInTexture = true;
      updateCurrentGlTextureInfo(currentFrameInfo, currentBitmapInfo.bitmap);
    }

      currentGlTextureInfo =
          new GlTextureInfo(
              currentTexId,
              /* fboId= */ C.INDEX_UNSET,
              /* rboId= */ C.INDEX_UNSET,
              currentBitmapInfo.frameInfo.width,
              currentBitmapInfo.frameInfo.height);
    }

    framesToQueueForCurrentBitmap--;
    downstreamShaderProgramCapacity--;
    shaderProgram.queueInputFrame(
        glObjectsProvider, checkNotNull(currentGlTextureInfo), round(currentPresentationTimeUs));
        glObjectsProvider, checkNotNull(currentGlTextureInfo), currentPresentationTimeUs);
    DebugTraceUtil.logEvent(
        DebugTraceUtil.EVENT_VFP_QUEUE_BITMAP,
        (long) currentPresentationTimeUs,
        /* extra= */ currentBitmapInfo.frameInfo.width + "x" + currentBitmapInfo.frameInfo.height);
    currentPresentationTimeUs += currentBitmapInfo.frameDurationUs;
        currentPresentationTimeUs,
        /* extra= */ currentFrameInfo.width + "x" + currentFrameInfo.height);

    if (framesToQueueForCurrentBitmap == 0) {
    if (!currentBitmapInfo.inStreamOffsetsUs.hasNext()) {
      isNextFrameInTexture = false;
      pendingBitmaps.remove();
      if (pendingBitmaps.isEmpty() && currentInputStreamEnded) {
        // Only signal end of stream after all pending bitmaps are processed.
        // TODO(b/269424561): Call signalEndOfCurrentInputStream on every bitmap
        shaderProgram.signalEndOfCurrentInputStream();
        DebugTraceUtil.logEvent(
            DebugTraceUtil.EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
@@ -220,19 +192,42 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
      }
    }

  /** Information to generate all the frames associated with a specific {@link Bitmap}. */
  /** Information needed to generate all the frames associated with a specific {@link Bitmap}. */
  private static final class BitmapFrameSequenceInfo {
    public final Bitmap bitmap;
    public final FrameInfo frameInfo;
    public final double frameDurationUs;
    public final int numberOfFrames;
    private final FrameInfo frameInfo;
    private final TimestampIterator inStreamOffsetsUs;

    public BitmapFrameSequenceInfo(
        Bitmap bitmap, FrameInfo frameInfo, double frameDurationUs, int numberOfFrames) {
        Bitmap bitmap, FrameInfo frameInfo, TimestampIterator inStreamOffsetsUs) {
      this.bitmap = bitmap;
      this.frameInfo = frameInfo;
      this.frameDurationUs = frameDurationUs;
      this.numberOfFrames = numberOfFrames;
      this.inStreamOffsetsUs = inStreamOffsetsUs;
    }
  }

  private void updateCurrentGlTextureInfo(FrameInfo frameInfo, Bitmap bitmap)
      throws VideoFrameProcessingException {
    int currentTexId;
    try {
      if (currentGlTextureInfo != null) {
        currentGlTextureInfo.release();
      }
      currentTexId =
          GlUtil.createTexture(
              frameInfo.width, frameInfo.height, /* useHighPrecisionColorComponents= */ useHdr);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, currentTexId);
      GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
      GlUtil.checkGlError();
    } catch (GlUtil.GlException e) {
      throw VideoFrameProcessingException.from(e);
    }
    currentGlTextureInfo =
        new GlTextureInfo(
            currentTexId,
            /* fboId= */ C.INDEX_UNSET,
            /* rboId= */ C.INDEX_UNSET,
            frameInfo.width,
            frameInfo.height);
  }
}
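With this change the texture manager emits one frame per timestamp from the queued bitmap's iterator instead of deriving a frame count from a duration and frame rate. A worked trace of the code above (values illustrative):

    // Assume frameInfo.offsetToAddUs = 2_000_000 and inStreamOffsetsUs yields 0, 500_000, 1_000_000.
    // maybeQueueToShaderProgram() then queues the same texture three times, at presentation times
    // 2_000_000, 2_500_000 and 3_000_000 (offsetToAddUs + inStreamOffsetsUs.next()). Once hasNext()
    // returns false the bitmap is removed from pendingBitmaps and, if the input stream has ended,
    // end of stream is signalled to the shader program.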
@@ -349,7 +349,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
  private volatile @MonotonicNonNull CountDownLatch latch;
  private volatile @MonotonicNonNull FrameInfo nextInputFrameInfo;
  private volatile boolean inputStreamEnded;
  private volatile boolean hasRefreshedNextInputFrameInfo;

  private DefaultVideoFrameProcessor(
      Context context,
@@ -421,41 +420,17 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
    inputSwitcher.activeTextureManager().setDefaultBufferSize(width, height);
  }

  @Override
  public void queueInputBitmap(Bitmap inputBitmap, long durationUs, float frameRate) {
    checkState(
        hasRefreshedNextInputFrameInfo,
        "registerInputStream must be called before queueing another bitmap");
    inputSwitcher
        .activeTextureManager()
        .queueInputBitmap(
            inputBitmap,
            durationUs,
            checkNotNull(nextInputFrameInfo),
            frameRate,
            /* useHdr= */ false);
    hasRefreshedNextInputFrameInfo = false;
  }

  @Override
  public void queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
    FrameInfo frameInfo = checkNotNull(this.nextInputFrameInfo);
    // TODO(b/262693274): move frame duplication logic out of the texture manager so
    // textureManager.queueInputBitmap() frame rate and duration parameters be removed.
    while (inStreamOffsetsUs.hasNext()) {
      long inStreamOffsetUs = inStreamOffsetsUs.next();
      inputSwitcher
          .activeTextureManager()
          .queueInputBitmap(
              inputBitmap,
              /* durationUs= */ C.MICROS_PER_SECOND,
              new FrameInfo.Builder(frameInfo)
                  .setOffsetToAddUs(frameInfo.offsetToAddUs + inStreamOffsetUs)
                  .build(),
              /* frameRate= */ 1,
              new FrameInfo.Builder(frameInfo).setOffsetToAddUs(frameInfo.offsetToAddUs).build(),
              inStreamOffsetsUs,
              /* useHdr= */ false);
    }
  }

  @Override
  public void queueInputTexture(int textureId, long presentationTimeUs) {
@@ -482,7 +457,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
            "InputType %s - %dx%d",
            getInputTypeString(inputType), frameInfo.width, frameInfo.height));
    nextInputFrameInfo = adjustForPixelWidthHeightRatio(frameInfo);
    hasRefreshedNextInputFrameInfo = true;
    synchronized (lock) {
      if (!processingInput) {
        videoFrameProcessingTaskExecutor.submitAndBlock(() -> configureEffects(effects));
@@ -524,7 +498,6 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
        nextInputFrameInfo, "registerInputStream must be called before registering input frames");

    inputSwitcher.activeTextureManager().registerInputFrame(nextInputFrameInfo);
    hasRefreshedNextInputFrameInfo = false;
  }

  @Override
@@ -25,6 +25,7 @@ import androidx.media3.common.FrameInfo;
import androidx.media3.common.OnInputFrameProcessedListener;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.TimestampIterator;

/** Handles {@code DefaultVideoFrameProcessor}'s input. */
/* package */ interface TextureManager extends GlShaderProgram.InputListener {
@@ -42,13 +43,16 @@ import androidx.media3.common.VideoFrameProcessor;
   * Provides an input {@link Bitmap} to put into the video frames.
   *
   * @param inputBitmap The {@link Bitmap} queued to the {@code VideoFrameProcessor}.
   * @param durationUs The duration of the bitmap in the composition, in microseconds.
   * @param frameInfo Information about the bitmap being queued.
   * @param frameRate The rate at which to generate frames with the bitmap, in frames per second.
   * @param inStreamOffsetsUs The times within the current stream that the bitmap should be shown
   *     at. The timestamps should be monotonically increasing.
   * @param useHdr Whether input and/or output colors are HDR.
   */
  default void queueInputBitmap(
      Bitmap inputBitmap, long durationUs, FrameInfo frameInfo, float frameRate, boolean useHdr) {
      Bitmap inputBitmap,
      FrameInfo frameInfo,
      TimestampIterator inStreamOffsetsUs,
      boolean useHdr) {
    throw new UnsupportedOperationException();
  }

@@ -44,6 +44,7 @@ import androidx.media3.common.GlTextureInfo;
import androidx.media3.common.SurfaceInfo;
import androidx.media3.common.VideoFrameProcessingException;
import androidx.media3.common.VideoFrameProcessor;
import androidx.media3.common.util.ConstantRateTimestampIterator;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.TimestampIterator;
import androidx.media3.common.util.UnstableApi;
@@ -254,8 +255,7 @@ public final class VideoFrameProcessorTestRunner {
  private final AtomicReference<VideoFrameProcessingException> videoFrameProcessingException;
  private final VideoFrameProcessor videoFrameProcessor;
  private final ImmutableList<Effect> effects;

  private @MonotonicNonNull BitmapReader bitmapReader;
  private final @MonotonicNonNull BitmapReader bitmapReader;

  private VideoFrameProcessorTestRunner(
      String testId,
@@ -356,7 +356,8 @@ public final class VideoFrameProcessorTestRunner {
            .setPixelWidthHeightRatio(pixelWidthHeightRatio)
            .setOffsetToAddUs(offsetToAddUs)
            .build());
    videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
    videoFrameProcessor.queueInputBitmap(
        inputBitmap, new ConstantRateTimestampIterator(durationUs, frameRate));
  }

  public void queueInputBitmaps(int width, int height, Pair<Bitmap, TimestampIterator>... frames) {
@@ -622,8 +622,8 @@ public class TransformerEndToEndTest {

    assertThat(result.exportResult.processedInputs).hasSize(7);
    assertThat(result.exportResult.channelCount).isEqualTo(1);
    assertThat(result.exportResult.videoFrameCount).isEqualTo(94);
    assertThat(result.exportResult.durationMs).isEqualTo(3100);
    assertThat(result.exportResult.durationMs).isEqualTo(3133);
    assertThat(result.exportResult.videoFrameCount).isEqualTo(95);
  }

  @Test
@@ -216,7 +216,7 @@ public final class EditedMediaItem {
  public final long durationUs;

  /** The frame rate of the image in the output video, in frames per second. */
  @IntRange(from = 0)
  @IntRange(from = 1)
  public final int frameRate;

  /** The {@link Effects} to apply to the {@link #mediaItem}. */
@@ -36,6 +36,7 @@ import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.BitmapLoader;
import androidx.media3.common.util.ConstantRateTimestampIterator;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.common.util.Util;
import androidx.media3.datasource.DataSource;
@@ -178,7 +179,9 @@ public final class ImageAssetLoader implements AssetLoader {
    // callback rather than setting duration here.
    if (sampleConsumer == null
        || !sampleConsumer.queueInputBitmap(
            bitmap, editedMediaItem.durationUs, editedMediaItem.frameRate)) {
            bitmap,
            new ConstantRateTimestampIterator(
                editedMediaItem.durationUs, editedMediaItem.frameRate))) {
      scheduledExecutorService.schedule(
          () -> queueBitmapInternal(bitmap, format), QUEUE_BITMAP_INTERVAL_MS, MILLISECONDS);
      return;
@@ -66,24 +66,6 @@ public interface SampleConsumer {
    throw new UnsupportedOperationException();
  }

  /**
   * Attempts to provide an input {@link Bitmap} to the consumer.
   *
   * <p>Should only be used for image data.
   *
   * @param inputBitmap The {@link Bitmap} to queue to the consumer.
   * @param durationUs The duration for which to display the {@code inputBitmap}, in microseconds.
   * @param frameRate The frame rate at which to display the {@code inputBitmap}, in frames per
   *     second.
   * @return Whether the {@link Bitmap} was successfully queued. If {@code false}, the caller should
   *     try again later.
   */
  // TODO(b/262693274): Delete this method and usages in favor of the one below (Note it is not
  // deprecated because transformer still relies on this method for frame duplication).
  default boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
    throw new UnsupportedOperationException();
  }

  /**
   * Attempts to provide an input {@link Bitmap} to the consumer.
   *
@@ -348,37 +348,6 @@ import java.util.concurrent.atomic.AtomicInteger;
    sequenceAssetLoaderListener.onError(exportException);
  }

  /**
   * Wraps a {@link TimestampIterator}, providing all the values in the original timestamp iterator
   * (in the same order) up to and including the first occurrence of the {@code clippingValue}.
   */
  private static final class ClippingIterator implements TimestampIterator {

    private final TimestampIterator iterator;
    private final long clippingValue;
    private boolean hasReachedClippingValue;

    public ClippingIterator(TimestampIterator iterator, long clippingValue) {
      this.iterator = iterator;
      this.clippingValue = clippingValue;
    }

    @Override
    public boolean hasNext() {
      return !hasReachedClippingValue && iterator.hasNext();
    }

    @Override
    public long next() {
      checkState(hasNext());
      long next = iterator.next();
      if (clippingValue == next) {
        hasReachedClippingValue = true;
      }
      return next;
    }
  }

  // Classes accessed from AssetLoader threads.

  private final class SampleConsumerWrapper implements SampleConsumer {
@@ -428,51 +397,31 @@ import java.util.concurrent.atomic.AtomicInteger;
      return true;
    }

    @Override
    public boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
      if (isLooping && totalDurationUs + durationUs > maxSequenceDurationUs) {
        if (!isMaxSequenceDurationUsFinal) {
          return false;
        }
        durationUs = maxSequenceDurationUs - totalDurationUs;
        if (durationUs == 0) {
          if (!videoLoopingEnded) {
            videoLoopingEnded = true;
            signalEndOfVideoInput();
          }
          return false;
        }
        videoLoopingEnded = true;
      }

      return sampleConsumer.queueInputBitmap(inputBitmap, durationUs, frameRate);
    }

    @Override
    public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
      TimestampIterator iteratorToUse = inStreamOffsetsUs;
      if (isLooping) {
        long durationLeftUs = maxSequenceDurationUs - totalDurationUs;
        if (durationLeftUs <= 0) {
          if (!videoLoopingEnded) {
            videoLoopingEnded = true;
            signalEndOfVideoInput();
          }
          return false;
        }
        long lastOffsetUs = C.TIME_UNSET;
        while (inStreamOffsetsUs.hasNext()) {
          long offsetUs = inStreamOffsetsUs.next();
          if (totalDurationUs + offsetUs > maxSequenceDurationUs) {
            if (!isMaxSequenceDurationUsFinal) {
              return false;
            }
            iteratorToUse = new ClippingIterator(inStreamOffsetsUs, offsetUs);
            if (lastOffsetUs == C.TIME_UNSET) {
              if (!videoLoopingEnded) {
                videoLoopingEnded = true;
                signalEndOfVideoInput();
              }
              return false;
            }
            inStreamOffsetsUs = new ClippingIterator(inStreamOffsetsUs.copyOf(), lastOffsetUs);
            videoLoopingEnded = true;
            break;
          }
          lastOffsetUs = offsetUs;
        }
      }
      return sampleConsumer.queueInputBitmap(inputBitmap, iteratorToUse);
      return sampleConsumer.queueInputBitmap(inputBitmap, inStreamOffsetsUs.copyOf());
    }

    @Override
@@ -564,4 +513,40 @@ import java.util.concurrent.atomic.AtomicInteger;
      });
    }
  }

  /**
   * Wraps a {@link TimestampIterator}, providing all the values in the original timestamp iterator
   * (in the same order) up to and including the first occurrence of the {@code clippingValue}.
   */
  private static final class ClippingIterator implements TimestampIterator {

    private final TimestampIterator iterator;
    private final long clippingValue;
    private boolean hasReachedClippingValue;

    public ClippingIterator(TimestampIterator iterator, long clippingValue) {
      this.iterator = iterator;
      this.clippingValue = clippingValue;
    }

    @Override
    public boolean hasNext() {
      return !hasReachedClippingValue && iterator.hasNext();
    }

    @Override
    public long next() {
      checkState(hasNext());
      long next = iterator.next();
      if (clippingValue <= next) {
        hasReachedClippingValue = true;
      }
      return next;
    }

    @Override
    public TimestampIterator copyOf() {
      return new ClippingIterator(iterator.copyOf(), clippingValue);
    }
  }
}
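The relocated `ClippingIterator` now also stops on values past the limit (`clippingValue <= next` rather than an exact match) and supports `copyOf()`. A behaviour sketch (the class is private to this file; values illustrative):

    // Wrapping an iterator that yields 0, 1_000_000, 2_000_000, 3_000_000, 4_000_000 with
    // clippingValue = 2_000_000 produces 0, 1_000_000, 2_000_000; after that next() has set
    // hasReachedClippingValue, so hasNext() returns false and the remaining timestamps are dropped.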
@@ -168,12 +168,6 @@ import java.util.concurrent.atomic.AtomicLong;
      mediaItemOffsetUs.addAndGet(durationUs);
    }

    @Override
    public boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
      videoFrameProcessor.queueInputBitmap(inputBitmap, durationUs, frameRate);
      return true;
    }

    @Override
    public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
      videoFrameProcessor.queueInputBitmap(inputBitmap, inStreamOffsetsUs);
@@ -22,6 +22,7 @@ import android.graphics.Bitmap;
import android.os.Looper;
import androidx.media3.common.Format;
import androidx.media3.common.MediaItem;
import androidx.media3.common.util.TimestampIterator;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import java.time.Duration;
@@ -126,7 +127,7 @@ public class ImageAssetLoaderTest {
  private static final class FakeSampleConsumer implements SampleConsumer {

    @Override
    public boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRate) {
    public boolean queueInputBitmap(Bitmap inputBitmap, TimestampIterator inStreamOffsetsUs) {
      return true;
    }