Compare commits

...

2 Commits

Author SHA1 Message Date
sheenachhabra
bd14b753ee Add forceVideoTrack flag on EditedMediaItemSequence
This is similar to forceAudioTrack.

PiperOrigin-RevId: 743235372
2025-04-02 12:24:58 -07:00
tonihei
989e9f9e84 Remove remaining synchronized keywords from EPII
These are only needed for the `waitUninterruptibly` handling, which is
really just waiting, with a timeout, for a condition to become true on
another thread, plus Clock and interrupt handling.

We already have ConditionVariable, which serves this purpose and has
methods with a timeout and with interrupt handling. Adding another
version of the call with both timeout and interrupt handling allows us
to replace the manual code in EPII. The ConditionVariable methods were
also missing the clock calls to signal a wait operation.

PiperOrigin-RevId: 743214709
2025-04-02 11:30:04 -07:00
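
A minimal sketch of the waiting pattern this second commit moves EPII to, assuming only the media3 APIs visible in the diff below (the ConditionVariable(Clock) constructor and the new blockUninterruptible(timeoutMs) overload); the worker thread and doSomeWork() are hypothetical stand-ins for the playback thread handling a message:

    import androidx.media3.common.util.Clock;
    import androidx.media3.common.util.ConditionVariable;

    /** Hypothetical illustration of the ConditionVariable-based acknowledgement pattern. */
    public final class ProcessedConditionExample {

      public static void main(String[] args) {
        // Passing a Clock lets FakeClock-driven tests observe the blocked thread.
        ConditionVariable processedCondition = new ConditionVariable(Clock.DEFAULT);

        // Stand-in for the playback thread: once the "message" is handled it opens the
        // condition, replacing the old synchronized { flag.set(true); notifyAll(); } code.
        new Thread(
                () -> {
                  doSomeWork();
                  processedCondition.open();
                })
            .start();

        // Stand-in for the application thread: waits with a timeout, cannot be interrupted
        // out of the wait, and learns whether the condition opened in time.
        boolean processed = processedCondition.blockUninterruptible(/* timeoutMs= */ 500);
        System.out.println("processed=" + processed);
      }

      private static void doSomeWork() {
        // Simulate the playback thread handling a message.
        try {
          Thread.sleep(100);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      }

      private ProcessedConditionExample() {}
    }

Like the removed waitUninterruptibly helper, blockUninterruptible(timeoutMs) restores the thread's interrupted status before returning and reports whether the condition was opened before the timeout elapsed.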
7 changed files with 416 additions and 109 deletions
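
A rough sketch of how the new setForceVideoTrack flag from the first commit is meant to be used, mirroring the Transformer tests added below; the asset URIs, the 320x240 output size, and the class name are placeholders, not part of the change:

    import androidx.media3.common.MediaItem;
    import androidx.media3.common.MimeTypes;
    import androidx.media3.effect.Presentation;
    import androidx.media3.transformer.Composition;
    import androidx.media3.transformer.EditedMediaItem;
    import androidx.media3.transformer.EditedMediaItemSequence;
    import androidx.media3.transformer.Effects;
    import androidx.media3.transformer.Transformer;
    import com.google.common.collect.ImmutableList;

    /** Hypothetical usage of EditedMediaItemSequence.Builder.setForceVideoTrack(boolean). */
    final class ForceVideoTrackExample {

      static Composition buildComposition() {
        EditedMediaItem audioOnlyItem =
            new EditedMediaItem.Builder(MediaItem.fromUri("asset:///audio_only.mp4")).build();
        EditedMediaItem audioVideoItem =
            new EditedMediaItem.Builder(MediaItem.fromUri("asset:///audio_video.mp4")).build();
        // The sequence starts without video, so blank frames must be forced for the first item.
        EditedMediaItemSequence sequence =
            new EditedMediaItemSequence.Builder(audioOnlyItem, audioVideoItem, audioVideoItem)
                .setForceVideoTrack(true)
                .build();
        // Generating blank frames needs a fixed output resolution, set via a Presentation effect.
        return new Composition.Builder(sequence)
            .setEffects(
                new Effects(
                    /* audioProcessors= */ ImmutableList.of(),
                    /* videoEffects= */ ImmutableList.of(
                        Presentation.createForWidthAndHeight(
                            /* width= */ 320, /* height= */ 240,
                            Presentation.LAYOUT_SCALE_TO_FIT))))
            .build();
      }

      static Transformer buildTransformer(android.content.Context context) {
        // H.264 output mirrors the tests below; the default H.265 may not be available everywhere.
        return new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H264).build();
      }

      private ForceVideoTrackExample() {}
    }

Without the flag, a sequence whose first item (or leading gap) has no video fails to export; with it, blank frames are generated, which is why the output resolution must be fixed with a Presentation effect and why video transmuxing cannot be requested at the same time.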

View File

@@ -82,6 +82,7 @@ public class ConditionVariable {
    */
   public synchronized void block() throws InterruptedException {
     while (!isOpen) {
+      clock.onThreadBlocked();
       wait();
     }
   }
@@ -105,6 +106,7 @@ public class ConditionVariable {
       block();
     } else {
       while (!isOpen && nowMs < endMs) {
+        clock.onThreadBlocked();
         wait(endMs - nowMs);
         nowMs = clock.elapsedRealtime();
       }
@@ -113,14 +115,17 @@ public class ConditionVariable {
     }
   }

   /**
-   * Blocks until the condition is open. Unlike {@link #block}, this method will continue to block
-   * if the calling thread is interrupted. If the calling thread was interrupted then its {@link
-   * Thread#isInterrupted() interrupted status} will be set when the method returns.
+   * Blocks until the condition is open.
+   *
+   * <p>Unlike {@link #block}, this method will continue to block if the calling thread is
+   * interrupted. If the calling thread was interrupted then its {@link Thread#isInterrupted()
+   * interrupted status} will be set when the method returns.
    */
   public synchronized void blockUninterruptible() {
     boolean wasInterrupted = false;
     while (!isOpen) {
       try {
+        clock.onThreadBlocked();
         wait();
       } catch (InterruptedException e) {
         wasInterrupted = true;
@@ -132,6 +137,45 @@ public class ConditionVariable {
     }
   }

+  /**
+   * Blocks until the condition is open or until {@code timeoutMs} have passed.
+   *
+   * <p>Unlike {@link #block}, this method will continue to block if the calling thread is
+   * interrupted. If the calling thread was interrupted then its {@link Thread#isInterrupted()
+   * interrupted status} will be set when the method returns.
+   *
+   * @param timeoutMs The maximum time to wait in milliseconds. If {@code timeoutMs <= 0} then the
+   *     call will return immediately without blocking.
+   * @return True if the condition was opened, false if the call returns because of the timeout.
+   */
+  public synchronized boolean blockUninterruptible(long timeoutMs) {
+    if (timeoutMs <= 0) {
+      return isOpen;
+    }
+    long nowMs = clock.elapsedRealtime();
+    long endMs = nowMs + timeoutMs;
+    if (endMs < nowMs) {
+      // timeoutMs is large enough for (nowMs + timeoutMs) to rollover. Block indefinitely.
+      blockUninterruptible();
+    } else {
+      boolean wasInterrupted = false;
+      while (!isOpen && nowMs < endMs) {
+        try {
+          clock.onThreadBlocked();
+          wait(endMs - nowMs);
+        } catch (InterruptedException e) {
+          wasInterrupted = true;
+        }
+        nowMs = clock.elapsedRealtime();
+      }
+      if (wasInterrupted) {
+        // Restore the interrupted status.
+        Thread.currentThread().interrupt();
+      }
+    }
+    return isOpen;
+  }
+
   /** Returns whether the condition is opened. */
   public synchronized boolean isOpen() {
     return isOpen;

View File

@@ -33,14 +33,14 @@ public class ConditionVariableTest {
   }

   @Test
-  public void blockWithTimeout_timesOut() throws InterruptedException {
+  public void block_withTimeoutUnopened_timesOut() throws InterruptedException {
     ConditionVariable conditionVariable = buildTestConditionVariable();
     assertThat(conditionVariable.block(1)).isFalse();
     assertThat(conditionVariable.isOpen()).isFalse();
   }

   @Test
-  public void blockWithTimeout_blocksForAtLeastTimeout() throws InterruptedException {
+  public void block_withTimeoutUnopened_blocksForAtLeastTimeout() throws InterruptedException {
     ConditionVariable conditionVariable = buildTestConditionVariable();
     long startTimeMs = System.currentTimeMillis();
     assertThat(conditionVariable.block(/* timeoutMs= */ 500)).isFalse();
@@ -49,7 +49,8 @@ public class ConditionVariableTest {
   }

   @Test
-  public void blockWithMaxTimeout_blocks_thenThrowsWhenInterrupted() throws InterruptedException {
+  public void block_withMaxTimeoutUnopened_blocksThenThrowsWhenInterrupted()
+      throws InterruptedException {
     ConditionVariable conditionVariable = buildTestConditionVariable();

     AtomicBoolean blockReturned = new AtomicBoolean();
@@ -76,7 +77,7 @@ public class ConditionVariableTest {
   }

   @Test
-  public void block_blocks_thenThrowsWhenInterrupted() throws InterruptedException {
+  public void block_unopened_blocksThenThrowsWhenInterrupted() throws InterruptedException {
     ConditionVariable conditionVariable = buildTestConditionVariable();

     AtomicBoolean blockReturned = new AtomicBoolean();
@@ -103,7 +104,7 @@ public class ConditionVariableTest {
   }

   @Test
-  public void block_blocks_thenReturnsWhenOpened() throws InterruptedException {
+  public void block_opened_blocksThenReturnsWhenOpened() throws InterruptedException {
     ConditionVariable conditionVariable = buildTestConditionVariable();

     AtomicBoolean blockReturned = new AtomicBoolean();
@@ -130,7 +131,7 @@ public class ConditionVariableTest {
   }

   @Test
-  public void blockUnterruptible_blocksIfInterrupted_thenUnblocksWhenOpened()
+  public void blockUninterruptible_blocksIfInterruptedThenUnblocksWhenOpened()
       throws InterruptedException {
     ConditionVariable conditionVariable = buildTestConditionVariable();
@@ -140,8 +141,8 @@ public class ConditionVariableTest {
         new Thread(
             () -> {
               conditionVariable.blockUninterruptible();
-              blockReturned.set(true);
               interruptedStatusSet.set(Thread.currentThread().isInterrupted());
+              blockReturned.set(true);
             });

     blockingThread.start();
@@ -160,6 +161,58 @@ public class ConditionVariableTest {
     assertThat(conditionVariable.isOpen()).isTrue();
   }

+  @Test
+  public void blockUninterruptible_withTimeoutUnopened_timesOut() throws InterruptedException {
+    ConditionVariable conditionVariable = buildTestConditionVariable();
+    assertThat(conditionVariable.blockUninterruptible(1)).isFalse();
+    assertThat(conditionVariable.isOpen()).isFalse();
+  }
+
+  @Test
+  public void blockUninterruptible_withTimeoutUnopened_blocksForAtLeastTimeout()
+      throws InterruptedException {
+    ConditionVariable conditionVariable = buildTestConditionVariable();
+    long startTimeMs = System.currentTimeMillis();
+    assertThat(conditionVariable.blockUninterruptible(/* timeoutMs= */ 500)).isFalse();
+    long endTimeMs = System.currentTimeMillis();
+    assertThat(endTimeMs - startTimeMs).isAtLeast(500);
+  }
+
+  @Test
+  public void blockUninterruptible_withMaxTimeout_blocksUntilOpened() throws InterruptedException {
+    ConditionVariable conditionVariable = buildTestConditionVariable();
+
+    AtomicBoolean blockReturned = new AtomicBoolean();
+    AtomicBoolean interruptedStatusSet = new AtomicBoolean();
+    Thread blockingThread =
+        new Thread(
+            () -> {
+              conditionVariable.blockUninterruptible(/* timeoutMs= */ Long.MAX_VALUE);
+              interruptedStatusSet.set(Thread.currentThread().isInterrupted());
+              blockReturned.set(true);
+            });
+
+    blockingThread.start();
+    Thread.sleep(500);
+    assertThat(blockReturned.get()).isFalse();
+
+    blockingThread.interrupt();
+    Thread.sleep(500);
+    // blockUninterruptible should still be blocked.
+    assertThat(blockReturned.get()).isFalse();
+
+    conditionVariable.open();
+    blockingThread.join();
+    // blockUninterruptible should have set the thread's interrupted status on exit.
+    assertThat(interruptedStatusSet.get()).isTrue();
+    assertThat(conditionVariable.isOpen()).isTrue();
+  }
+
   private static ConditionVariable buildTestConditionVariable() {
     return new ConditionVariable(
         new SystemClock() {

View File

@@ -54,6 +54,7 @@ import androidx.media3.common.Player.RepeatMode;
 import androidx.media3.common.Timeline;
 import androidx.media3.common.util.Assertions;
 import androidx.media3.common.util.Clock;
+import androidx.media3.common.util.ConditionVariable;
 import androidx.media3.common.util.HandlerWrapper;
 import androidx.media3.common.util.Log;
 import androidx.media3.common.util.TraceUtil;
@@ -80,7 +81,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Objects;
-import java.util.concurrent.atomic.AtomicBoolean;

 /** Implements the internal behavior of {@link ExoPlayerImpl}. */
 /* package */ final class ExoPlayerImplInternal
@@ -536,12 +536,11 @@ import java.util.concurrent.atomic.AtomicBoolean;
       handler.obtainMessage(MSG_SET_FOREGROUND_MODE, /* foregroundMode */ 1, 0).sendToTarget();
       return true;
     } else {
-      AtomicBoolean processedFlag = new AtomicBoolean();
+      ConditionVariable processedCondition = new ConditionVariable(clock);
       handler
-          .obtainMessage(MSG_SET_FOREGROUND_MODE, /* foregroundMode */ 0, 0, processedFlag)
+          .obtainMessage(MSG_SET_FOREGROUND_MODE, /* foregroundMode */ 0, 0, processedCondition)
           .sendToTarget();
-      waitUninterruptibly(processedFlag, setForegroundModeTimeoutMs);
-      return processedFlag.get();
+      return processedCondition.blockUninterruptible(setForegroundModeTimeoutMs);
     }
   }
@@ -560,13 +559,12 @@ import java.util.concurrent.atomic.AtomicBoolean;
     if (releasedOnApplicationThread || !playbackLooper.getThread().isAlive()) {
       return true;
     }
-    AtomicBoolean processedFlag = new AtomicBoolean();
+    ConditionVariable processedCondition = new ConditionVariable(clock);
     handler
-        .obtainMessage(MSG_SET_VIDEO_OUTPUT, new Pair<>(videoOutput, processedFlag))
+        .obtainMessage(MSG_SET_VIDEO_OUTPUT, new Pair<>(videoOutput, processedCondition))
         .sendToTarget();
     if (timeoutMs != C.TIME_UNSET) {
-      waitUninterruptibly(processedFlag, timeoutMs);
-      return processedFlag.get();
+      return processedCondition.blockUninterruptible(timeoutMs);
     }
     return true;
   }
@@ -581,10 +579,9 @@ import java.util.concurrent.atomic.AtomicBoolean;
       return true;
     }
     releasedOnApplicationThread = true;
-    AtomicBoolean processedFlag = new AtomicBoolean();
-    handler.obtainMessage(MSG_RELEASE, processedFlag).sendToTarget();
-    waitUninterruptibly(processedFlag, releaseTimeoutMs);
-    return processedFlag.get();
+    ConditionVariable processedCondition = new ConditionVariable(clock);
+    handler.obtainMessage(MSG_RELEASE, processedCondition).sendToTarget();
+    return processedCondition.blockUninterruptible(releaseTimeoutMs);
   }

   public Looper getPlaybackLooper() {
@@ -707,13 +704,15 @@ import java.util.concurrent.atomic.AtomicBoolean;
         break;
       case MSG_SET_FOREGROUND_MODE:
         setForegroundModeInternal(
-            /* foregroundMode= */ msg.arg1 != 0, /* processedFlag= */ (AtomicBoolean) msg.obj);
+            /* foregroundMode= */ msg.arg1 != 0,
+            /* processedCondition= */ (ConditionVariable) msg.obj);
         break;
       case MSG_SET_VIDEO_OUTPUT:
-        Pair<Object, AtomicBoolean> setVideoOutputPayload = (Pair<Object, AtomicBoolean>) msg.obj;
+        Pair<Object, ConditionVariable> setVideoOutputPayload =
+            (Pair<Object, ConditionVariable>) msg.obj;
         setVideoOutputInternal(
             /* videoOutput= */ setVideoOutputPayload.first,
-            /* processedFlag= */ setVideoOutputPayload.second);
+            /* processedCondition= */ setVideoOutputPayload.second);
         break;
       case MSG_STOP:
         stopInternal(/* forceResetRenderers= */ false, /* acknowledgeStop= */ true);
@@ -783,7 +782,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
         setVideoFrameMetadataListenerInternal((VideoFrameMetadataListener) msg.obj);
         break;
       case MSG_RELEASE:
-        releaseInternal(/* processedFlag= */ (AtomicBoolean) msg.obj);
+        releaseInternal(/* processedCondition= */ (ConditionVariable) msg.obj);
         // Return immediately to not send playback info updates after release.
         return true;
       default:
@@ -916,36 +915,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
     playbackInfo = playbackInfo.copyWithPlaybackError(error);
   }

-  /**
-   * Blocks the current thread until a condition becomes true or the specified amount of time has
-   * elapsed.
-   *
-   * <p>If the current thread is interrupted while waiting for the condition to become true, this
-   * method will restore the interrupt <b>after</b> the condition became true or the operation times
-   * out.
-   *
-   * @param condition The condition.
-   * @param timeoutMs The time in milliseconds to wait for the condition to become true.
-   */
-  private synchronized void waitUninterruptibly(AtomicBoolean condition, long timeoutMs) {
-    long deadlineMs = clock.elapsedRealtime() + timeoutMs;
-    long remainingMs = timeoutMs;
-    boolean wasInterrupted = false;
-    while (!condition.get() && remainingMs > 0) {
-      try {
-        clock.onThreadBlocked();
-        wait(remainingMs);
-      } catch (InterruptedException e) {
-        wasInterrupted = true;
-      }
-      remainingMs = deadlineMs - clock.elapsedRealtime();
-    }
-    if (wasInterrupted) {
-      // Restore the interrupted status.
-      Thread.currentThread().interrupt();
-    }
-  }
-
   private void setState(int state) {
     if (playbackInfo.playbackState != state) {
       if (state != Player.STATE_BUFFERING) {
@@ -1776,7 +1745,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
   }

   private void setForegroundModeInternal(
-      boolean foregroundMode, @Nullable AtomicBoolean processedFlag) {
+      boolean foregroundMode, @Nullable ConditionVariable processedCondition) {
     if (this.foregroundMode != foregroundMode) {
       this.foregroundMode = foregroundMode;
       if (!foregroundMode) {
@@ -1785,16 +1754,13 @@ import java.util.concurrent.atomic.AtomicBoolean;
         }
       }
     }
-    if (processedFlag != null) {
-      synchronized (this) {
-        processedFlag.set(true);
-        notifyAll();
-      }
+    if (processedCondition != null) {
+      processedCondition.open();
     }
   }

   private void setVideoOutputInternal(
-      @Nullable Object videoOutput, @Nullable AtomicBoolean processedFlag)
+      @Nullable Object videoOutput, @Nullable ConditionVariable processedCondition)
       throws ExoPlaybackException {
     for (RendererHolder renderer : renderers) {
       renderer.setVideoOutput(videoOutput);
@@ -1803,11 +1769,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
         || playbackInfo.playbackState == Player.STATE_BUFFERING) {
       handler.sendEmptyMessage(MSG_DO_SOME_WORK);
     }
-    if (processedFlag != null) {
-      synchronized (this) {
-        processedFlag.set(true);
-        notifyAll();
-      }
+    if (processedCondition != null) {
+      processedCondition.open();
     }
   }
@@ -1823,7 +1786,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
     setState(Player.STATE_IDLE);
   }

-  private void releaseInternal(AtomicBoolean processedFlag) {
+  private void releaseInternal(ConditionVariable processedCondition) {
     try {
       resetInternal(
           /* resetRenderers= */ true,
@@ -1837,10 +1800,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
       setState(Player.STATE_IDLE);
     } finally {
       playbackLooperProvider.releaseLooper();
-      synchronized (this) {
-        processedFlag.set(true);
-        notifyAll();
-      }
+      processedCondition.open();
     }
   }

View File

@@ -161,7 +161,8 @@ public final class ExternallyLoadedImagePlaybackTest {
     MediaSource.Factory mediaSourceFactory =
         new DefaultMediaSourceFactory(applicationContext)
             .setExternalImageLoader(
-                unused -> listeningExecutorService.submit(loadingComplete::blockUninterruptible));
+                unused ->
+                    listeningExecutorService.submit(() -> loadingComplete.blockUninterruptible()));
     ExoPlayer player =
         new ExoPlayer.Builder(applicationContext, renderersFactory)
             .setClock(new FakeClock(/* isAutoAdvancing= */ true))

View File

@@ -24,6 +24,8 @@ import static org.junit.Assert.assertThrows;
 import android.content.Context;
 import androidx.media3.common.C;
 import androidx.media3.common.MediaItem;
+import androidx.media3.common.MimeTypes;
+import androidx.media3.effect.Presentation;
 import androidx.media3.extractor.mp4.Mp4Extractor;
 import androidx.media3.extractor.text.DefaultSubtitleParserFactory;
 import androidx.media3.test.utils.FakeExtractorOutput;
@@ -31,6 +33,7 @@ import androidx.media3.test.utils.FakeTrackOutput;
 import androidx.media3.test.utils.TestUtil;
 import androidx.test.core.app.ApplicationProvider;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
+import com.google.common.collect.ImmutableList;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -60,7 +63,6 @@ public class TransformerGapsTest {
     testId = testName.getMethodName();
   }

-  // TODO: b/391111085 - Change test when gaps at the start of the sequence are supported.
   @Test
   public void export_withThreeMediaItemsAndFirstMediaItemHavingNoVideo_throws() {
     Transformer transformer = new Transformer.Builder(context).build();
@@ -77,6 +79,58 @@ public class TransformerGapsTest {
         ExportException.class, () -> transformerAndroidTestRunner.run(testId, composition));
   }

+  @Test
+  public void
+      export_withThreeMediaItemsAndFirstMediaItemHavingNoVideoAndForceVideoTrackSetToTrue_insertsBlankFramesForFirstMediaItem()
+          throws Exception {
+    int outputWidth = 320;
+    int outputHeight = 240;
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ MP4_ASSET.videoFormat,
+        /* outputFormat= */ MP4_ASSET
+            .videoFormat
+            .buildUpon()
+            .setWidth(outputWidth)
+            .setHeight(outputHeight)
+            .build());
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ MP4_ASSET.videoFormat,
+        /* outputFormat= */ MP4_ASSET.videoFormat);
+    Transformer transformer =
+        new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H264).build();
+    Composition composition =
+        new Composition.Builder(
+                new EditedMediaItemSequence.Builder(
+                        AUDIO_ONLY_MEDIA_ITEM, AUDIO_VIDEO_MEDIA_ITEM, AUDIO_VIDEO_MEDIA_ITEM)
+                    .setForceVideoTrack(true)
+                    .build())
+            .setEffects(
+                new Effects(
+                    ImmutableList.of(),
+                    ImmutableList.of(
+                        Presentation.createForWidthAndHeight(
+                            outputWidth, outputHeight, Presentation.LAYOUT_SCALE_TO_FIT))))
+            .build();
+
+    ExportTestResult result =
+        new TransformerAndroidTestRunner.Builder(context, transformer)
+            .build()
+            .run(testId, composition);
+
+    FakeExtractorOutput fakeExtractorOutput =
+        TestUtil.extractAllSamplesFromFilePath(
+            new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
+    FakeTrackOutput videoTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_VIDEO);
+    // The video gap is for 1024 ms with 30 fps.
+    int expectedBlankFrames = 31;
+    assertThat(videoTrackOutput.getSampleCount())
+        .isEqualTo(2 * MP4_ASSET.videoFrameCount + expectedBlankFrames);
+  }
+
   @Test
   public void
       export_withThreeMediaItemsAndSecondMediaItemHavingNoVideo_insertsBlankFramesForSecondMediaItem()
@@ -135,6 +189,56 @@ public class TransformerGapsTest {
         .isEqualTo(2 * MP4_ASSET.videoFrameCount + expectedBlankFrames);
   }

+  @Test
+  public void
+      export_withTwoVideoOnlyMediaItemsAndGapAtStartAndForceVideoTrackSetToTrue_insertsBlankFramesForGap()
+          throws Exception {
+    int outputWidth = 320;
+    int outputHeight = 240;
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ MP4_ASSET.videoFormat,
+        /* outputFormat= */ MP4_ASSET
+            .videoFormat
+            .buildUpon()
+            .setWidth(outputWidth)
+            .setHeight(outputHeight)
+            .build());
+    // The default output mime type is H265 which might not work on all the devices.
+    Transformer transformer =
+        new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H264).build();
+    Composition composition =
+        new Composition.Builder(
+                new EditedMediaItemSequence.Builder()
+                    .addGap(/* durationUs= */ 1_000_000)
+                    .addItem(VIDEO_ONLY_MEDIA_ITEM)
+                    .addItem(VIDEO_ONLY_MEDIA_ITEM)
+                    .setForceVideoTrack(true)
+                    .build())
+            .setEffects(
+                new Effects(
+                    ImmutableList.of(),
+                    ImmutableList.of(
+                        Presentation.createForWidthAndHeight(
+                            outputWidth, outputHeight, Presentation.LAYOUT_SCALE_TO_FIT))))
+            .build();
+
+    ExportTestResult result =
+        new TransformerAndroidTestRunner.Builder(context, transformer)
+            .build()
+            .run(testId, composition);
+
+    FakeExtractorOutput fakeExtractorOutput =
+        TestUtil.extractAllSamplesFromFilePath(
+            new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
+    FakeTrackOutput videoTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_VIDEO);
+    // The gap is for 1 sec with 30 fps.
+    int expectedBlankFrames = 30;
+    assertThat(videoTrackOutput.getSampleCount())
+        .isEqualTo(2 * MP4_ASSET.videoFrameCount + expectedBlankFrames);
+  }
+
   @Test
   public void export_withTwoVideoOnlyMediaItemsAndGapInMiddle_insertsBlankFramesForGap()
       throws Exception {
@@ -212,6 +316,83 @@ public class TransformerGapsTest {
     assertThrows(IllegalArgumentException.class, sequenceBuilder::build);
   }

+  @Test
+  public void export_withTwoMediaItemsAndGapAtStartAndOnlyForceAudioTrackSetToTrue_throws()
+      throws Exception {
+    Transformer transformer = new Transformer.Builder(context).build();
+    Composition composition =
+        new Composition.Builder(
+                new EditedMediaItemSequence.Builder()
+                    .addGap(/* durationUs= */ 1_000_000)
+                    .addItem(AUDIO_VIDEO_MEDIA_ITEM)
+                    .addItem(AUDIO_VIDEO_MEDIA_ITEM)
+                    .setForceAudioTrack(true)
+                    .build())
+            .build();
+    TransformerAndroidTestRunner transformerAndroidTestRunner =
+        new TransformerAndroidTestRunner.Builder(context, transformer).build();
+
+    assertThrows(
+        ExportException.class, () -> transformerAndroidTestRunner.run(testId, composition));
+  }
+
+  @Test
+  public void
+      export_withTwoMediaItemsAndGapAtStartAndBothForceAudioAndVideoTrackSetToTrue_insertsBlankFramesAndSilenceForGap()
+          throws Exception {
+    int outputWidth = 320;
+    int outputHeight = 240;
+    assumeFormatsSupported(
+        context,
+        testId,
+        /* inputFormat= */ MP4_ASSET.videoFormat,
+        /* outputFormat= */ MP4_ASSET
+            .videoFormat
+            .buildUpon()
+            .setWidth(outputWidth)
+            .setHeight(outputHeight)
+            .build());
+    // The default output mime type is H265 which might not work on all the devices.
+    Transformer transformer =
+        new Transformer.Builder(context).setVideoMimeType(MimeTypes.VIDEO_H264).build();
+    Composition composition =
+        new Composition.Builder(
+                new EditedMediaItemSequence.Builder()
+                    .addGap(/* durationUs= */ 1_000_000)
+                    .addItem(AUDIO_VIDEO_MEDIA_ITEM)
+                    .addItem(AUDIO_VIDEO_MEDIA_ITEM)
+                    .setForceAudioTrack(true)
+                    .setForceVideoTrack(true)
+                    .build())
+            .setEffects(
+                new Effects(
+                    ImmutableList.of(),
+                    ImmutableList.of(
+                        Presentation.createForWidthAndHeight(
+                            outputWidth, outputHeight, Presentation.LAYOUT_SCALE_TO_FIT))))
+            .build();
+
+    ExportTestResult result =
+        new TransformerAndroidTestRunner.Builder(context, transformer)
+            .build()
+            .run(testId, composition);
+
+    FakeExtractorOutput fakeExtractorOutput =
+        TestUtil.extractAllSamplesFromFilePath(
+            new Mp4Extractor(new DefaultSubtitleParserFactory()), result.filePath);
+    FakeTrackOutput videoTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_VIDEO);
+    // The gap is for 1 sec with 30 fps.
+    int expectedBlankFrames = 30;
+    assertThat(videoTrackOutput.getSampleCount())
+        .isEqualTo(2 * MP4_ASSET.videoFrameCount + expectedBlankFrames);
+    FakeTrackOutput audioTrackOutput = getTrackOutput(fakeExtractorOutput, C.TRACK_TYPE_AUDIO);
+    long lastAudioSampleTimestampUs =
+        audioTrackOutput.getSampleTimeUs(audioTrackOutput.getSampleCount() - 1);
+    // 1000 ms gap + 1024 ms audio + 1024 ms audio.
+    // Since audio samples are not deterministic, hence use a lower timestamp.
+    assertThat(lastAudioSampleTimestampUs).isGreaterThan(3_000_000);
+  }
+
   @Test
   public void export_withTwoMediaItemsAndGapInMiddle_insertsBlankFramesForGap() throws Exception {
     assumeFormatsSupported(

View File

@@ -20,6 +20,7 @@ import static androidx.media3.common.util.Assertions.checkArgument;
 import androidx.media3.common.MediaItem;
 import androidx.media3.common.audio.AudioProcessor;
 import androidx.media3.common.util.UnstableApi;
+import androidx.media3.effect.Presentation;
 import com.google.common.collect.ImmutableList;
 import com.google.errorprone.annotations.CanIgnoreReturnValue;
 import java.util.List;
@@ -37,6 +38,7 @@ public final class EditedMediaItemSequence {
     private final ImmutableList.Builder<EditedMediaItem> items;
     private boolean isLooping;
     private boolean forceAudioTrack;
+    private boolean forceVideoTrack;

     /** Creates an instance. */
     public Builder(EditedMediaItem... editedMediaItems) {
@@ -55,6 +57,7 @@ public final class EditedMediaItemSequence {
               .addAll(editedMediaItemSequence.editedMediaItems);
       isLooping = editedMediaItemSequence.isLooping;
       forceAudioTrack = editedMediaItemSequence.forceAudioTrack;
+      forceVideoTrack = editedMediaItemSequence.forceVideoTrack;
     }

     /**
@@ -98,10 +101,9 @@ public final class EditedMediaItemSequence {
      *
      * <p>A gap is a period of time with no media.
      *
-     * <p>If the gap is at the start of the sequence then {@linkplain #setForceAudioTrack(boolean)
-     * force audio track} flag must be set to force silent audio.
-     *
-     * <p>Gaps at the start of the sequence are not supported if the sequence has video.
+     * <p>If the gap is added at the start of the sequence, then {@linkplain
+     * #setForceAudioTrack(boolean) force audio track} or/and {@linkplain
+     * #setForceVideoTrack(boolean) force video track} flag must be set appropriately.
      *
      * @param durationUs The duration of the gap, in milliseconds.
      * @return This builder, for convenience.
@@ -165,6 +167,43 @@ public final class EditedMediaItemSequence {
       return this;
     }

+    /**
+     * Forces blank frames in the {@linkplain EditedMediaItemSequence sequence}.
+     *
+     * <p>This flag is necessary when:
+     *
+     * <ul>
+     *   <li>The first {@link EditedMediaItem} in the sequence does not contain video, but
+     *       subsequent items do.
+     *   <li>The first item in the sequence is a {@linkplain #addGap(long) gap} and the subsequent
+     *       {@linkplain EditedMediaItem media items} contain video.
+     * </ul>
+     *
+     * <p>If the flag is not set appropriately, then the export will {@linkplain
+     * Transformer.Listener#onError(Composition, ExportResult, ExportException) fail}.
+     *
+     * <p>If the first {@link EditedMediaItem} already contains video, this flag has no effect.
+     *
+     * <p>The MIME type of the output's video track can be set using {@link
+     * Transformer.Builder#setVideoMimeType(String)}.
+     *
+     * <p>The output resolution must be set using a {@link Presentation} effect on the {@link
+     * Composition}.
+     *
+     * <p>Forcing a video track and {@linkplain Composition.Builder#setTransmuxVideo(boolean)
+     * requesting video transmuxing} are not allowed together because generating blank frames
+     * requires transcoding.
+     *
+     * <p>The default value is {@code false}.
+     *
+     * @param forceVideoTrack Whether to force video track.
+     */
+    @CanIgnoreReturnValue
+    public Builder setForceVideoTrack(boolean forceVideoTrack) {
+      this.forceVideoTrack = forceVideoTrack;
+      return this;
+    }
+
     /**
      * Builds the {@link EditedMediaItemSequence}.
      *
@@ -199,6 +238,9 @@ public final class EditedMediaItemSequence {
   /** Forces silent audio in the {@linkplain EditedMediaItemSequence sequence}. */
   public final boolean forceAudioTrack;

+  /** Forces blank frames in the {@linkplain EditedMediaItemSequence sequence}. */
+  public final boolean forceVideoTrack;
+
   /**
    * @deprecated Use {@link Builder}.
    */
@@ -234,10 +276,12 @@ public final class EditedMediaItemSequence {
     checkArgument(
         !editedMediaItems.isEmpty(), "The sequence must contain at least one EditedMediaItem.");
     checkArgument(
-        !editedMediaItems.get(0).isGap() || builder.forceAudioTrack,
-        "If the first item in the sequence is a Gap, then forceAudioTrack flag must be set");
+        !editedMediaItems.get(0).isGap() || builder.forceAudioTrack || builder.forceVideoTrack,
+        "If the first item in the sequence is a Gap, then forceAudioTrack or forceVideoTrack flag"
+            + " must be set");
     this.isLooping = builder.isLooping;
     this.forceAudioTrack = builder.forceAudioTrack;
+    this.forceVideoTrack = builder.forceVideoTrack;
   }

   /** Return whether any items are a {@linkplain Builder#addGap(long) gap}. */

View File

@@ -83,6 +83,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   private final List<EditedMediaItem> editedMediaItems;
   private final boolean isLooping;
   private final boolean forceAudioTrack;
+  private final boolean forceVideoTrack;
   private final Factory assetLoaderFactory;
   private final CompositionSettings compositionSettings;
   private final Listener sequenceAssetLoaderListener;
@@ -139,6 +140,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     editedMediaItems = sequence.editedMediaItems;
     isLooping = sequence.isLooping;
     this.forceAudioTrack = sequence.forceAudioTrack;
+    this.forceVideoTrack = sequence.forceVideoTrack;
     this.assetLoaderFactory = new GapInterceptingAssetLoaderFactory(assetLoaderFactory);
     this.compositionSettings = compositionSettings;
     sequenceAssetLoaderListener = listener;
@@ -271,10 +273,16 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       return decode;
     }

-    boolean addForcedAudioTrack = forceAudioTrack && reportedTrackCount.get() == 1 && !isAudio;
+    boolean addForcedAudioTrack = false;
+    boolean addForcedVideoTrack = false;
+    if (reportedTrackCount.get() == 1) {
+      addForcedAudioTrack = forceAudioTrack && !isAudio;
+      addForcedVideoTrack = forceVideoTrack && isAudio;
+    }

     if (!isTrackCountReported) {
-      int trackCount = reportedTrackCount.get() + (addForcedAudioTrack ? 1 : 0);
+      int trackCount =
+          reportedTrackCount.get() + (addForcedAudioTrack || addForcedVideoTrack ? 1 : 0);
       sequenceAssetLoaderListener.onTrackCount(trackCount);
       isTrackCountReported = true;
     }
@@ -293,6 +301,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
           FORCE_AUDIO_TRACK_FORMAT, SUPPORTED_OUTPUT_TYPE_DECODED);
       decodeAudio = true;
     }
+    if (addForcedVideoTrack) {
+      sequenceAssetLoaderListener.onTrackAdded(
+          BLANK_IMAGE_BITMAP_FORMAT, SUPPORTED_OUTPUT_TYPE_DECODED);
+      decodeVideo = true;
+    }

     return decodeOutput;
   }
@@ -324,7 +337,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
       sampleConsumer = new SampleConsumerWrapper(wrappedSampleConsumer, trackType);
       sampleConsumersByTrackType.put(trackType, sampleConsumer);

-      if (forceAudioTrack && reportedTrackCount.get() == 1 && trackType == C.TRACK_TYPE_VIDEO) {
+      if (reportedTrackCount.get() == 1) {
+        if (forceAudioTrack && trackType == C.TRACK_TYPE_VIDEO) {
          SampleConsumer wrappedAudioSampleConsumer =
              checkStateNotNull(
                  sequenceAssetLoaderListener.onOutputFormat(
@@ -336,18 +350,28 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
          sampleConsumersByTrackType.put(
              C.TRACK_TYPE_AUDIO,
              new SampleConsumerWrapper(wrappedAudioSampleConsumer, C.TRACK_TYPE_AUDIO));
+        } else if (forceVideoTrack && trackType == C.TRACK_TYPE_AUDIO) {
+          SampleConsumer wrappedVideoSampleConsumer =
+              checkStateNotNull(
+                  sequenceAssetLoaderListener.onOutputFormat(BLANK_IMAGE_BITMAP_FORMAT));
+          sampleConsumersByTrackType.put(
+              C.TRACK_TYPE_VIDEO,
+              new SampleConsumerWrapper(wrappedVideoSampleConsumer, C.TRACK_TYPE_VIDEO));
+        }
       }
     } else {
+      String missingTrackMessage =
+          trackType == C.TRACK_TYPE_AUDIO
+              ? "The preceding MediaItem does not contain any audio track. If the sequence starts"
+                  + " with an item without audio track (like images), followed by items with"
+                  + " audio tracks, then EditedMediaItemSequence.Builder.setForceAudioTrack()"
+                  + " needs to be set to true."
+              : "The preceding MediaItem does not contain any video track. If the sequence starts"
+                  + " with an item without video track (audio only), followed by items with video"
+                  + " tracks, then EditedMediaItemSequence.Builder.setForceVideoTrack() needs to"
+                  + " be set to true.";
       sampleConsumer =
-          checkStateNotNull(
-              sampleConsumersByTrackType.get(trackType),
-              Util.formatInvariant(
-                  "The preceding MediaItem does not contain any track of type %d. If the"
-                      + " Composition contains a sequence that starts with items without audio"
-                      + " tracks (like images), followed by items with audio tracks,"
-                      + " Composition.Builder.experimentalSetForceAudioTrack() needs to be set to"
-                      + " true.",
-                  trackType));
+          checkStateNotNull(sampleConsumersByTrackType.get(trackType), missingTrackMessage);
     }
     onMediaItemChanged(trackType, format);
     if (reportedTrackCount.get() == 1 && sampleConsumersByTrackType.size() == 2) {
@@ -700,7 +724,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
     private GapSignalingAssetLoader(long durationUs) {
       this.durationUs = durationUs;
       shouldProduceAudio = sequenceHasAudio || forceAudioTrack;
-      shouldProduceVideo = sequenceHasVideo;
+      shouldProduceVideo = sequenceHasVideo || forceVideoTrack;
       checkState(shouldProduceAudio || shouldProduceVideo);
       this.audioTrackFormat = new Format.Builder().setSampleMimeType(MimeTypes.AUDIO_RAW).build();
       this.audioTrackDecodedFormat =