Compare commits

...

2 Commits

Author SHA1 Message Date
sheenachhabra
bd14b753ee Add forceVideoTrack flag on EditedMediaItemSequence
This is similar to forceAudioTrack.

PiperOrigin-RevId: 743235372
2025-04-02 12:24:58 -07:00
tonihei
989e9f9e84 Remove remaining synchronized keywords from EPII
These were only needed for the `waitUninterruptibly` handling, which is
really just waiting for a condition to become true on another thread
with a timeout, as well as Clock and interrupt handling.

We already have ConditionVariable that serves this purpose, which
has methods with a timeout and with interrupt handling. Adding
another version of the call with both timeout and interrupt handling
allows to replace the EPII manual code. The ConditionVariable methods
were also missing the clock calls to signal a wait operation.

PiperOrigin-RevId: 743214709
2025-04-02 11:30:04 -07:00
7 changed files with 416 additions and 109 deletions

View File

@ -82,6 +82,7 @@ public class ConditionVariable {
*/
public synchronized void block() throws InterruptedException {
// Loop to guard against spurious wakeups: only return once the condition is open.
while (!isOpen) {
// Signal the wait operation to the Clock (e.g. lets test clocks advance blocked threads).
clock.onThreadBlocked();
wait();
}
}
@ -105,6 +106,7 @@ public class ConditionVariable {
block();
} else {
while (!isOpen && nowMs < endMs) {
clock.onThreadBlocked();
wait(endMs - nowMs);
nowMs = clock.elapsedRealtime();
}
@ -113,14 +115,17 @@ public class ConditionVariable {
}
/**
* Blocks until the condition is open. Unlike {@link #block}, this method will continue to block
* if the calling thread is interrupted. If the calling thread was interrupted then its {@link
* Thread#isInterrupted() interrupted status} will be set when the method returns.
* Blocks until the condition is open.
*
* <p>Unlike {@link #block}, this method will continue to block if the calling thread is
* interrupted. If the calling thread was interrupted then its {@link Thread#isInterrupted()
* interrupted status} will be set when the method returns.
*/
public synchronized void blockUninterruptible() {
boolean wasInterrupted = false;
while (!isOpen) {
try {
clock.onThreadBlocked();
wait();
} catch (InterruptedException e) {
wasInterrupted = true;
@ -132,6 +137,45 @@ public class ConditionVariable {
}
}
/**
* Blocks until the condition is open or until {@code timeoutMs} have passed.
*
* <p>Unlike {@link #block}, this method will continue to block if the calling thread is
* interrupted. If the calling thread was interrupted then its {@link Thread#isInterrupted()
* interrupted status} will be set when the method returns.
*
* @param timeoutMs The maximum time to wait in milliseconds. If {@code timeoutMs <= 0} then the
*     call will return immediately without blocking.
* @return True if the condition was opened, false if the call returns because of the timeout.
*/
public synchronized boolean blockUninterruptible(long timeoutMs) {
if (timeoutMs <= 0) {
// Non-positive timeout: report the current state without blocking.
return isOpen;
}
long nowMs = clock.elapsedRealtime();
long endMs = nowMs + timeoutMs;
if (endMs < nowMs) {
// timeoutMs is large enough for (nowMs + timeoutMs) to overflow. Block indefinitely.
blockUninterruptible();
} else {
boolean wasInterrupted = false;
// Loop to guard against spurious wakeups and to re-wait after an interrupt.
while (!isOpen && nowMs < endMs) {
try {
// Signal the wait operation to the Clock (e.g. lets test clocks advance blocked threads).
clock.onThreadBlocked();
wait(endMs - nowMs);
} catch (InterruptedException e) {
// Swallow the interrupt for now and keep waiting; the status is restored below.
wasInterrupted = true;
}
nowMs = clock.elapsedRealtime();
}
if (wasInterrupted) {
// Restore the interrupted status.
Thread.currentThread().interrupt();
}
}
return isOpen;
}
/** Returns whether the condition is opened. */
public synchronized boolean isOpen() {
return isOpen;

View File

@ -33,14 +33,14 @@ public class ConditionVariableTest {
}
@Test
public void blockWithTimeout_timesOut() throws InterruptedException {
public void block_withTimeoutUnopened_timesOut() throws InterruptedException {
ConditionVariable conditionVariable = buildTestConditionVariable();
assertThat(conditionVariable.block(1)).isFalse();
assertThat(conditionVariable.isOpen()).isFalse();
}
@Test
public void blockWithTimeout_blocksForAtLeastTimeout() throws InterruptedException {
public void block_withTimeoutUnopened_blocksForAtLeastTimeout() throws InterruptedException {
ConditionVariable conditionVariable = buildTestConditionVariable();
long startTimeMs = System.currentTimeMillis();
assertThat(conditionVariable.block(/* timeoutMs= */ 500)).isFalse();
@ -49,7 +49,8 @@ public class ConditionVariableTest {
}
@Test
public void blockWithMaxTimeout_blocks_thenThrowsWhenInterrupted() throws InterruptedException {
public void block_withMaxTimeoutUnopened_blocksThenThrowsWhenInterrupted()
throws InterruptedException {
ConditionVariable conditionVariable = buildTestConditionVariable();
AtomicBoolean blockReturned = new AtomicBoolean();
@ -76,7 +77,7 @@ public class ConditionVariableTest {
}
@Test
public void block_blocks_thenThrowsWhenInterrupted() throws InterruptedException {
public void block_unopened_blocksThenThrowsWhenInterrupted() throws InterruptedException {
ConditionVariable conditionVariable = buildTestConditionVariable();
AtomicBoolean blockReturned = new AtomicBoolean();
@ -103,7 +104,7 @@ public class ConditionVariableTest {
}
@Test
public void block_blocks_thenReturnsWhenOpened() throws InterruptedException {
public void block_opened_blocksThenReturnsWhenOpened() throws InterruptedException {
ConditionVariable conditionVariable = buildTestConditionVariable();
AtomicBoolean blockReturned = new AtomicBoolean();
@ -130,7 +131,7 @@ public class ConditionVariableTest {
}
@Test
public void blockUnterruptible_blocksIfInterrupted_thenUnblocksWhenOpened()
public void blockUninterruptible_blocksIfInterruptedThenUnblocksWhenOpened()
throws InterruptedException {
ConditionVariable conditionVariable = buildTestConditionVariable();
@ -140,8 +141,8 @@ public class ConditionVariableTest {
new Thread(
() -> {
conditionVariable.blockUninterruptible();
blockReturned.set(true);
interruptedStatusSet.set(Thread.currentThread().isInterrupted());
blockReturned.set(true);
});
blockingThread.start();
@ -160,6 +161,58 @@ public class ConditionVariableTest {
assertThat(conditionVariable.isOpen()).isTrue();
}
@Test
public void blockUninterruptible_withTimeoutUnopened_timesOut() throws InterruptedException {
ConditionVariable conditionVariable = buildTestConditionVariable();
// The condition is never opened, so the call must time out and report false.
// Inline parameter comment added for consistency with the other tests in this file.
assertThat(conditionVariable.blockUninterruptible(/* timeoutMs= */ 1)).isFalse();
assertThat(conditionVariable.isOpen()).isFalse();
}
@Test
public void blockUninterruptible_withTimeoutUnopened_blocksForAtLeastTimeout()
throws InterruptedException {
ConditionVariable conditionVariable = buildTestConditionVariable();
// The condition is never opened, so the call must block for the full 500 ms timeout.
long startTimeMs = System.currentTimeMillis();
assertThat(conditionVariable.blockUninterruptible(/* timeoutMs= */ 500)).isFalse();
long endTimeMs = System.currentTimeMillis();
assertThat(endTimeMs - startTimeMs).isAtLeast(500);
}
@Test
public void blockUninterruptible_withMaxTimeout_blocksUntilOpened() throws InterruptedException {
ConditionVariable conditionVariable = buildTestConditionVariable();
AtomicBoolean blockReturned = new AtomicBoolean();
AtomicBoolean interruptedStatusSet = new AtomicBoolean();
Thread blockingThread =
new Thread(
() -> {
conditionVariable.blockUninterruptible(/* timeoutMs= */ Long.MAX_VALUE);
interruptedStatusSet.set(Thread.currentThread().isInterrupted());
blockReturned.set(true);
});
blockingThread.start();
// Give the thread time to start blocking.
Thread.sleep(500);
assertThat(blockReturned.get()).isFalse();
blockingThread.interrupt();
Thread.sleep(500);
// blockUninterruptible should still be blocked despite the interrupt.
assertThat(blockReturned.get()).isFalse();
conditionVariable.open();
blockingThread.join();
// blockUninterruptible should have set the thread's interrupted status on exit.
assertThat(interruptedStatusSet.get()).isTrue();
assertThat(conditionVariable.isOpen()).isTrue();
}
private static ConditionVariable buildTestConditionVariable() {
return new ConditionVariable(
new SystemClock() {

View File

@ -54,6 +54,7 @@ import androidx.media3.common.Player.RepeatMode;
import androidx.media3.common.Timeline;
import androidx.media3.common.util.Assertions;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.ConditionVariable;
import androidx.media3.common.util.HandlerWrapper;
import androidx.media3.common.util.Log;
import androidx.media3.common.util.TraceUtil;
@ -80,7 +81,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
/** Implements the internal behavior of {@link ExoPlayerImpl}. */
/* package */ final class ExoPlayerImplInternal
@ -536,12 +536,11 @@ import java.util.concurrent.atomic.AtomicBoolean;
handler.obtainMessage(MSG_SET_FOREGROUND_MODE, /* foregroundMode */ 1, 0).sendToTarget();
return true;
} else {
AtomicBoolean processedFlag = new AtomicBoolean();
ConditionVariable processedCondition = new ConditionVariable(clock);
handler
.obtainMessage(MSG_SET_FOREGROUND_MODE, /* foregroundMode */ 0, 0, processedFlag)
.obtainMessage(MSG_SET_FOREGROUND_MODE, /* foregroundMode */ 0, 0, processedCondition)
.sendToTarget();
waitUninterruptibly(processedFlag, setForegroundModeTimeoutMs);
return processedFlag.get();
return processedCondition.blockUninterruptible(setForegroundModeTimeoutMs);
}
}
@ -560,13 +559,12 @@ import java.util.concurrent.atomic.AtomicBoolean;
if (releasedOnApplicationThread || !playbackLooper.getThread().isAlive()) {
return true;
}
AtomicBoolean processedFlag = new AtomicBoolean();
ConditionVariable processedCondition = new ConditionVariable(clock);
handler
.obtainMessage(MSG_SET_VIDEO_OUTPUT, new Pair<>(videoOutput, processedFlag))
.obtainMessage(MSG_SET_VIDEO_OUTPUT, new Pair<>(videoOutput, processedCondition))
.sendToTarget();
if (timeoutMs != C.TIME_UNSET) {
waitUninterruptibly(processedFlag, timeoutMs);
return processedFlag.get();
return processedCondition.blockUninterruptible(timeoutMs);
}
return true;
}
@ -581,10 +579,9 @@ import java.util.concurrent.atomic.AtomicBoolean;
return true;
}
releasedOnApplicationThread = true;
AtomicBoolean processedFlag = new AtomicBoolean();
handler.obtainMessage(MSG_RELEASE, processedFlag).sendToTarget();
waitUninterruptibly(processedFlag, releaseTimeoutMs);
return processedFlag.get();
ConditionVariable processedCondition = new ConditionVariable(clock);
handler.obtainMessage(MSG_RELEASE, processedCondition).sendToTarget();
return processedCondition.blockUninterruptible(releaseTimeoutMs);
}
public Looper getPlaybackLooper() {
@ -707,13 +704,15 @@ import java.util.concurrent.atomic.AtomicBoolean;
break;
case MSG_SET_FOREGROUND_MODE:
setForegroundModeInternal(
/* foregroundMode= */ msg.arg1 != 0, /* processedFlag= */ (AtomicBoolean) msg.obj);
/* foregroundMode= */ msg.arg1 != 0,
/* processedCondition= */ (ConditionVariable) msg.obj);
break;
case MSG_SET_VIDEO_OUTPUT:
Pair<Object, AtomicBoolean> setVideoOutputPayload = (Pair<Object, AtomicBoolean>) msg.obj;
Pair<Object, ConditionVariable> setVideoOutputPayload =
(Pair<Object, ConditionVariable>) msg.obj;
setVideoOutputInternal(
/* videoOutput= */ setVideoOutputPayload.first,
/* processedFlag= */ setVideoOutputPayload.second);
/* processedCondition= */ setVideoOutputPayload.second);
break;
case MSG_STOP:
stopInternal(/* forceResetRenderers= */ false, /* acknowledgeStop= */ true);
@ -783,7 +782,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
setVideoFrameMetadataListenerInternal((VideoFrameMetadataListener) msg.obj);
break;
case MSG_RELEASE:
releaseInternal(/* processedFlag= */ (AtomicBoolean) msg.obj);
releaseInternal(/* processedCondition= */ (ConditionVariable) msg.obj);
// Return immediately to not send playback info updates after release.
return true;
default:
@ -916,36 +915,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
playbackInfo = playbackInfo.copyWithPlaybackError(error);
}
/**
* Blocks the current thread until a condition becomes true or the specified amount of time has
* elapsed.
*
* <p>If the current thread is interrupted while waiting for the condition to become true, this
* method will restore the interrupt <b>after</b> the condition became true or the operation times
* out.
*
* @param condition The condition.
* @param timeoutMs The time in milliseconds to wait for the condition to become true.
*/
private synchronized void waitUninterruptibly(AtomicBoolean condition, long timeoutMs) {
long deadlineMs = clock.elapsedRealtime() + timeoutMs;
long remainingMs = timeoutMs;
boolean wasInterrupted = false;
while (!condition.get() && remainingMs > 0) {
try {
clock.onThreadBlocked();
wait(remainingMs);
} catch (InterruptedException e) {
wasInterrupted = true;
}
remainingMs = deadlineMs - clock.elapsedRealtime();
}
if (wasInterrupted) {
// Restore the interrupted status.
Thread.currentThread().interrupt();
}
}
private void setState(int state) {
if (playbackInfo.playbackState != state) {
if (state != Player.STATE_BUFFERING) {
@ -1776,7 +1745,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
}
private void setForegroundModeInternal(
boolean foregroundMode, @Nullable AtomicBoolean processedFlag) {
boolean foregroundMode, @Nullable ConditionVariable processedCondition) {
if (this.foregroundMode != foregroundMode) {
this.foregroundMode = foregroundMode;
if (!foregroundMode) {
@ -1785,16 +1754,13 @@ import java.util.concurrent.atomic.AtomicBoolean;
}
}
}
if (processedFlag != null) {
synchronized (this) {
processedFlag.set(true);
notifyAll();
}
if (processedCondition != null) {
processedCondition.open();
}
}
private void setVideoOutputInternal(
@Nullable Object videoOutput, @Nullable AtomicBoolean processedFlag)
@Nullable Object videoOutput, @Nullable ConditionVariable processedCondition)
throws ExoPlaybackException {
for (RendererHolder renderer : renderers) {
renderer.setVideoOutput(videoOutput);
@ -1803,11 +1769,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
|| playbackInfo.playbackState == Player.STATE_BUFFERING) {
handler.sendEmptyMessage(MSG_DO_SOME_WORK);
}
if (processedFlag != null) {
synchronized (this) {
processedFlag.set(true);
notifyAll();
}
if (processedCondition != null) {
processedCondition.open();
}
}
@ -1823,7 +1786,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
setState(Player.STATE_IDLE);
}
private void releaseInternal(AtomicBoolean processedFlag) {
private void releaseInternal(ConditionVariable processedCondition) {
try {
resetInternal(
/* resetRenderers= */ true,
@ -1837,10 +1800,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
setState(Player.STATE_IDLE);
} finally {
playbackLooperProvider.releaseLooper();
synchronized (this) {
processedFlag.set(true);
notifyAll();
}
processedCondition.open();
}
}

View File

@ -161,7 +161,8 @@ public final class ExternallyLoadedImagePlaybackTest {
MediaSource.Factory mediaSourceFactory =
new DefaultMediaSourceFactory(applicationContext)
.setExternalImageLoader(
unused -> listeningExecutorService.submit(loadingComplete::blockUninterruptible));
unused ->
listeningExecutorService.submit(() -> loadingComplete.blockUninterruptible()));
ExoPlayer player =
new ExoPlayer.Builder(applicationContext, renderersFactory)
.setClock(new FakeClock(/* isAutoAdvancing= */ true))

View File

@ -24,6 +24,8 @@ import static org.junit.Assert.assertThrows;
import android.content.Context;
import androidx.media3.common.C;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.effect.Presentation;
import androidx.media3.extractor.mp4.Mp4Extractor;
import androidx.media3.extractor.text.DefaultSubtitleParserFactory;
import androidx.media3.test.utils.FakeExtractorOutput;
@ -31,6 +33,7 @@ import androidx.media3.test.utils.FakeTrackOutput;
import androidx.media3.test.utils.TestUtil;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.collect.ImmutableList;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -60,7 +63,6 @@ public class TransformerGapsTest {
testId = testName.getMethodName();
}
// TODO: b/391111085 - Change test when gaps at the start of the sequence are supported.
@Test
public void export_withThreeMediaItemsAndFirstMediaItemHavingNoVideo_throws() {
Transformer transformer = new Transformer.Builder(context).build();
@ -77,6 +79,58 @@ public class TransformerGapsTest {
ExportException.class, () -> transformerAndroidTestRunner.run(testId, composition));
}
@Test
public void
export_withThreeMediaItemsAndFirstMediaItemHavingNoVideoAndForceVideoTrackSetToTrue_insertsBlankFramesForFirstMediaItem()
throws Exception {
int outputWidth = 320;
int outputHeight = 240;
// Skip on devices that cannot handle the asset at the scaled output resolution.
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET
.videoFormat
.buildUpon()
.setWidth(outputWidth)
.setHeight(outputHeight)
.build());
// Also skip on devices that cannot handle the asset at its original resolution.
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET.videoFormat);
// The default output mime type is H265, which might not work on all devices.
Transformer transformer =
new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H264).build();
// The first item has no video; setForceVideoTrack(true) requests blank frames for it.
Composition composition =
new Composition.Builder(
new EditedMediaItemSequence.Builder(
AUDIO_ONLY_MEDIA_ITEM, AUDIO_VIDEO_MEDIA_ITEM, AUDIO_VIDEO_MEDIA_ITEM)
.setForceVideoTrack(true)
.build())
.setEffects(
new Effects(
ImmutableList.of(),
ImmutableList.of(
Presentation.createForWidthAndHeight(
outputWidth, outputHeight, Presentation.LAYOUT_SCALE_TO_FIT))))
.build();
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, composition);
FakeExtractorOutput fakeExtractorOutput =
TestUtil.extractAllSamplesFromFilePath(
new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
FakeTrackOutput videoTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_VIDEO);
// The video gap is for 1024 ms with 30 fps, which rounds up to 31 blank frames.
int expectedBlankFrames = 31;
assertThat(videoTrackOutput.getSampleCount())
.isEqualTo(2 * MP4_ASSET.videoFrameCount + expectedBlankFrames);
}
@Test
public void
export_withThreeMediaItemsAndSecondMediaItemHavingNoVideo_insertsBlankFramesForSecondMediaItem()
@ -100,7 +154,7 @@ public class TransformerGapsTest {
TestUtil.extractAllSamplesFromFilePath(
new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
FakeTrackOutput videoTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_VIDEO);
// The gap is for 1024ms with 30 fps.
// The gap is for 1024 ms with 30 fps.
int expectedBlankFrames = 31;
assertThat(videoTrackOutput.getSampleCount())
.isEqualTo(2 * MP4_ASSET.videoFrameCount + expectedBlankFrames);
@ -129,12 +183,62 @@ public class TransformerGapsTest {
TestUtil.extractAllSamplesFromFilePath(
new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
FakeTrackOutput videoTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_VIDEO);
// The gap is for 1024ms with 30 fps.
// The gap is for 1024 ms with 30 fps.
int expectedBlankFrames = 31;
assertThat(videoTrackOutput.getSampleCount())
.isEqualTo(2 * MP4_ASSET.videoFrameCount + expectedBlankFrames);
}
@Test
public void
export_withTwoVideoOnlyMediaItemsAndGapAtStartAndForceVideoTrackSetToTrue_insertsBlankFramesForGap()
throws Exception {
int outputWidth = 320;
int outputHeight = 240;
// Skip on devices that cannot handle the asset at the scaled output resolution.
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET
.videoFormat
.buildUpon()
.setWidth(outputWidth)
.setHeight(outputHeight)
.build());
// The default output mime type is H265 which might not work on all the devices.
Transformer transformer =
new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H264).build();
// The sequence starts with a gap, so setForceVideoTrack(true) is required to generate
// blank frames for it.
Composition composition =
new Composition.Builder(
new EditedMediaItemSequence.Builder()
.addGap(/* durationUs= */ 1_000_000)
.addItem(VIDEO_ONLY_MEDIA_ITEM)
.addItem(VIDEO_ONLY_MEDIA_ITEM)
.setForceVideoTrack(true)
.build())
.setEffects(
new Effects(
ImmutableList.of(),
ImmutableList.of(
Presentation.createForWidthAndHeight(
outputWidth, outputHeight, Presentation.LAYOUT_SCALE_TO_FIT))))
.build();
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, composition);
FakeExtractorOutput fakeExtractorOutput =
TestUtil.extractAllSamplesFromFilePath(
new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
FakeTrackOutput videoTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_VIDEO);
// The gap is for 1 sec with 30 fps.
int expectedBlankFrames = 30;
assertThat(videoTrackOutput.getSampleCount())
.isEqualTo(2 * MP4_ASSET.videoFrameCount + expectedBlankFrames);
}
@Test
public void export_withTwoVideoOnlyMediaItemsAndGapInMiddle_insertsBlankFramesForGap()
throws Exception {
@ -212,6 +316,83 @@ public class TransformerGapsTest {
assertThrows(IllegalArgumentException.class, sequenceBuilder::build);
}
@Test
public void export_withTwoMediaItemsAndGapAtStartAndOnlyForceAudioTrackSetToTrue_throws()
throws Exception {
Transformer transformer = new Transformer.Builder(context).build();
// The sequence has video but only the audio track is forced, so the leading gap's video
// cannot be filled and the export is expected to fail.
Composition composition =
new Composition.Builder(
new EditedMediaItemSequence.Builder()
.addGap(/* durationUs= */ 1_000_000)
.addItem(AUDIO_VIDEO_MEDIA_ITEM)
.addItem(AUDIO_VIDEO_MEDIA_ITEM)
.setForceAudioTrack(true)
.build())
.build();
TransformerAndroidTestRunner transformerAndroidTestRunner =
new TransformerAndroidTestRunner.Builder(context, transformer).build();
assertThrows(
ExportException.class, () -> transformerAndroidTestRunner.run(testId, composition));
}
@Test
public void
export_withTwoMediaItemsAndGapAtStartAndBothForceAudioAndVideoTrackSetToTrue_insertsBlankFramesAndSilenceForGap()
throws Exception {
int outputWidth = 320;
int outputHeight = 240;
// Skip on devices that cannot handle the asset at the scaled output resolution.
assumeFormatsSupported(
context,
testId,
/* inputFormat= */ MP4_ASSET.videoFormat,
/* outputFormat= */ MP4_ASSET
.videoFormat
.buildUpon()
.setWidth(outputWidth)
.setHeight(outputHeight)
.build());
// The default output mime type is H265 which might not work on all the devices.
Transformer transformer =
new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H264).build();
// Both tracks are forced, so the leading gap gets blank frames and silent audio.
Composition composition =
new Composition.Builder(
new EditedMediaItemSequence.Builder()
.addGap(/* durationUs= */ 1_000_000)
.addItem(AUDIO_VIDEO_MEDIA_ITEM)
.addItem(AUDIO_VIDEO_MEDIA_ITEM)
.setForceAudioTrack(true)
.setForceVideoTrack(true)
.build())
.setEffects(
new Effects(
ImmutableList.of(),
ImmutableList.of(
Presentation.createForWidthAndHeight(
outputWidth, outputHeight, Presentation.LAYOUT_SCALE_TO_FIT))))
.build();
ExportTestResult result =
new TransformerAndroidTestRunner.Builder(context, transformer)
.build()
.run(testId, composition);
FakeExtractorOutput fakeExtractorOutput =
TestUtil.extractAllSamplesFromFilePath(
new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
FakeTrackOutput videoTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_VIDEO);
// The gap is for 1 sec with 30 fps.
int expectedBlankFrames = 30;
assertThat(videoTrackOutput.getSampleCount())
.isEqualTo(2 * MP4_ASSET.videoFrameCount + expectedBlankFrames);
FakeTrackOutput audioTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_AUDIO);
long lastAudioSampleTimestampUs =
audioTrackOutput.getSampleTimeUs(audioTrackOutput.getSampleCount() - 1);
// Expected total duration: 1000 ms gap + 1024 ms audio + 1024 ms audio.
// Audio sample timestamps are not fully deterministic, so only assert a lower bound.
assertThat(lastAudioSampleTimestampUs).isGreaterThan(3_000_000);
}
@Test
public void export_withTwoMediaItemsAndGapInMiddle_insertsBlankFramesForGap() throws Exception {
assumeFormatsSupported(
@ -303,7 +484,7 @@ public class TransformerGapsTest {
TestUtil.extractAllSamplesFromFilePath(
new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
FakeTrackOutput videoTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_VIDEO);
// The gap is for 1024ms with 30 fps.
// The gap is for 1024 ms with 30 fps.
int expectedBlankFramesForAudioOnlyItem = 31;
// The gap is for 1 sec with 30 fps.
int expectedBlankFramesForOneSecGap = 30;

View File

@ -20,6 +20,7 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import androidx.media3.common.MediaItem;
import androidx.media3.common.audio.AudioProcessor;
import androidx.media3.common.util.UnstableApi;
import androidx.media3.effect.Presentation;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.List;
@ -37,6 +38,7 @@ public final class EditedMediaItemSequence {
private final ImmutableList.Builder<EditedMediaItem> items;
private boolean isLooping;
private boolean forceAudioTrack;
private boolean forceVideoTrack;
/** Creates an instance. */
public Builder(EditedMediaItem... editedMediaItems) {
@ -55,6 +57,7 @@ public final class EditedMediaItemSequence {
.addAll(editedMediaItemSequence.editedMediaItems);
isLooping = editedMediaItemSequence.isLooping;
forceAudioTrack = editedMediaItemSequence.forceAudioTrack;
forceVideoTrack = editedMediaItemSequence.forceVideoTrack;
}
/**
@ -98,10 +101,9 @@ public final class EditedMediaItemSequence {
*
* <p>A gap is a period of time with no media.
*
* <p>If the gap is at the start of the sequence then {@linkplain #setForceAudioTrack(boolean)
* force audio track} flag must be set to force silent audio.
*
* <p>Gaps at the start of the sequence are not supported if the sequence has video.
<p>If the gap is added at the start of the sequence, then the {@linkplain
#setForceAudioTrack(boolean) force audio track} and/or {@linkplain
#setForceVideoTrack(boolean) force video track} flags must be set appropriately.
*
* @param durationUs The duration of the gap, in microseconds.
* @return This builder, for convenience.
@ -165,6 +167,43 @@ public final class EditedMediaItemSequence {
return this;
}
/**
* Forces blank frames in the {@linkplain EditedMediaItemSequence sequence}.
*
* <p>This flag is necessary when:
*
* <ul>
*   <li>The first {@link EditedMediaItem} in the sequence does not contain video, but
*       subsequent items do.
*   <li>The first item in the sequence is a {@linkplain #addGap(long) gap} and the subsequent
*       {@linkplain EditedMediaItem media items} contain video.
* </ul>
*
* <p>If the flag is not set appropriately, then the export will {@linkplain
* Transformer.Listener#onError(Composition, ExportResult, ExportException) fail}.
*
* <p>If the first {@link EditedMediaItem} already contains video, this flag has no effect.
*
* <p>The MIME type of the output's video track can be set using {@link
* Transformer.Builder#setVideoMimeType(String)}.
*
* <p>The output resolution must be set using a {@link Presentation} effect on the {@link
* Composition}.
*
* <p>Forcing a video track and {@linkplain Composition.Builder#setTransmuxVideo(boolean)
* requesting video transmuxing} are not allowed together because generating blank frames
* requires transcoding.
*
* <p>The default value is {@code false}.
*
* @param forceVideoTrack Whether to force a video track.
* @return This builder, for convenience.
*/
@CanIgnoreReturnValue
public Builder setForceVideoTrack(boolean forceVideoTrack) {
this.forceVideoTrack = forceVideoTrack;
return this;
}
/**
* Builds the {@link EditedMediaItemSequence}.
*
@ -199,6 +238,9 @@ public final class EditedMediaItemSequence {
/** Forces silent audio in the {@linkplain EditedMediaItemSequence sequence}. */
public final boolean forceAudioTrack;
/** Forces blank frames in the {@linkplain EditedMediaItemSequence sequence}. */
public final boolean forceVideoTrack;
/**
* @deprecated Use {@link Builder}.
*/
@ -234,10 +276,12 @@ public final class EditedMediaItemSequence {
checkArgument(
!editedMediaItems.isEmpty(), "The sequence must contain at least one EditedMediaItem.");
checkArgument(
!editedMediaItems.get(0).isGap() || builder.forceAudioTrack,
"If the first item in the sequence is a Gap, then forceAudioTrack flag must be set");
!editedMediaItems.get(0).isGap() || builder.forceAudioTrack || builder.forceVideoTrack,
"If the first item in the sequence is a Gap, then forceAudioTrack or forceVideoTrack flag"
+ " must be set");
this.isLooping = builder.isLooping;
this.forceAudioTrack = builder.forceAudioTrack;
this.forceVideoTrack = builder.forceVideoTrack;
}
/** Return whether any items are a {@linkplain Builder#addGap(long) gap}. */

View File

@ -83,6 +83,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final List<EditedMediaItem> editedMediaItems;
private final boolean isLooping;
private final boolean forceAudioTrack;
private final boolean forceVideoTrack;
private final Factory assetLoaderFactory;
private final CompositionSettings compositionSettings;
private final Listener sequenceAssetLoaderListener;
@ -139,6 +140,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
editedMediaItems = sequence.editedMediaItems;
isLooping = sequence.isLooping;
this.forceAudioTrack = sequence.forceAudioTrack;
this.forceVideoTrack = sequence.forceVideoTrack;
this.assetLoaderFactory = new GapInterceptingAssetLoaderFactory(assetLoaderFactory);
this.compositionSettings = compositionSettings;
sequenceAssetLoaderListener = listener;
@ -271,10 +273,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return decode;
}
boolean addForcedAudioTrack = forceAudioTrack && reportedTrackCount.get() == 1 && !isAudio;
boolean addForcedAudioTrack = false;
boolean addForcedVideoTrack = false;
if (reportedTrackCount.get() == 1) {
addForcedAudioTrack = forceAudioTrack && !isAudio;
addForcedVideoTrack = forceVideoTrack && isAudio;
}
if (!isTrackCountReported) {
int trackCount = reportedTrackCount.get() + (addForcedAudioTrack ? 1 : 0);
int trackCount =
reportedTrackCount.get() + (addForcedAudioTrack || addForcedVideoTrack ? 1 : 0);
sequenceAssetLoaderListener.onTrackCount(trackCount);
isTrackCountReported = true;
}
@ -293,6 +301,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
FORCE_AUDIO_TRACK_FORMAT, SUPPORTED_OUTPUT_TYPE_DECODED);
decodeAudio = true;
}
if (addForcedVideoTrack) {
sequenceAssetLoaderListener.onTrackAdded(
BLANK_IMAGE_BITMAP_FORMAT, SUPPORTED_OUTPUT_TYPE_DECODED);
decodeVideo = true;
}
return decodeOutput;
}
@ -324,30 +337,41 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
sampleConsumer = new SampleConsumerWrapper(wrappedSampleConsumer, trackType);
sampleConsumersByTrackType.put(trackType, sampleConsumer);
if (forceAudioTrack && reportedTrackCount.get() == 1 && trackType == C.TRACK_TYPE_VIDEO) {
SampleConsumer wrappedAudioSampleConsumer =
checkStateNotNull(
sequenceAssetLoaderListener.onOutputFormat(
FORCE_AUDIO_TRACK_FORMAT
.buildUpon()
.setSampleMimeType(MimeTypes.AUDIO_RAW)
.setPcmEncoding(C.ENCODING_PCM_16BIT)
.build()));
sampleConsumersByTrackType.put(
C.TRACK_TYPE_AUDIO,
new SampleConsumerWrapper(wrappedAudioSampleConsumer, C.TRACK_TYPE_AUDIO));
if (reportedTrackCount.get() == 1) {
if (forceAudioTrack && trackType == C.TRACK_TYPE_VIDEO) {
SampleConsumer wrappedAudioSampleConsumer =
checkStateNotNull(
sequenceAssetLoaderListener.onOutputFormat(
FORCE_AUDIO_TRACK_FORMAT
.buildUpon()
.setSampleMimeType(MimeTypes.AUDIO_RAW)
.setPcmEncoding(C.ENCODING_PCM_16BIT)
.build()));
sampleConsumersByTrackType.put(
C.TRACK_TYPE_AUDIO,
new SampleConsumerWrapper(wrappedAudioSampleConsumer, C.TRACK_TYPE_AUDIO));
} else if (forceVideoTrack && trackType == C.TRACK_TYPE_AUDIO) {
SampleConsumer wrappedVideoSampleConsumer =
checkStateNotNull(
sequenceAssetLoaderListener.onOutputFormat(BLANK_IMAGE_BITMAP_FORMAT));
sampleConsumersByTrackType.put(
C.TRACK_TYPE_VIDEO,
new SampleConsumerWrapper(wrappedVideoSampleConsumer, C.TRACK_TYPE_VIDEO));
}
}
} else {
String missingTrackMessage =
trackType == C.TRACK_TYPE_AUDIO
? "The preceding MediaItem does not contain any audio track. If the sequence starts"
+ " with an item without audio track (like images), followed by items with"
+ " audio tracks, then EditedMediaItemSequence.Builder.setForceAudioTrack()"
+ " needs to be set to true."
: "The preceding MediaItem does not contain any video track. If the sequence starts"
+ " with an item without video track (audio only), followed by items with video"
+ " tracks, then EditedMediaItemSequence.Builder.setForceVideoTrack() needs to"
+ " be set to true.";
sampleConsumer =
checkStateNotNull(
sampleConsumersByTrackType.get(trackType),
Util.formatInvariant(
"The preceding MediaItem does not contain any track of type %d. If the"
+ " Composition contains a sequence that starts with items without audio"
+ " tracks (like images), followed by items with audio tracks,"
+ " Composition.Builder.experimentalSetForceAudioTrack() needs to be set to"
+ " true.",
trackType));
checkStateNotNull(sampleConsumersByTrackType.get(trackType), missingTrackMessage);
}
onMediaItemChanged(trackType, format);
if (reportedTrackCount.get() == 1 && sampleConsumersByTrackType.size() == 2) {
@ -700,7 +724,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private GapSignalingAssetLoader(long durationUs) {
this.durationUs = durationUs;
shouldProduceAudio = sequenceHasAudio || forceAudioTrack;
shouldProduceVideo = sequenceHasVideo;
shouldProduceVideo = sequenceHasVideo || forceVideoTrack;
checkState(shouldProduceAudio || shouldProduceVideo);
this.audioTrackFormat = new Format.Builder().setSampleMimeType(MimeTypes.AUDIO_RAW).build();
this.audioTrackDecodedFormat =