Compare commits

...

6 Commits

Author SHA1 Message Date
kimvde
12b4c7d780 Remove unused param from VideoSink
PiperOrigin-RevId: 740739740
2025-03-26 06:15:03 -07:00
Googler
a24d1d41d9 BoxParser: Update Dolby Vision codec support.
1. Append the Dolby Vision initialization data to the list of initialization data.
2. Parse the `dvwC` box in BoxParser.

PiperOrigin-RevId: 740737770
2025-03-26 06:07:33 -07:00
kimvde
3d5e650980 Improvements to VideoSink.Listener
- Remove unused VideoSink parameter
- Make methods default so that implementations don't have to override
them all

PiperOrigin-RevId: 740715000
2025-03-26 04:28:07 -07:00
kimvde
d4ea3ad932 PlaybackVideoGraphWrapper: fix inputType discrepancy
PiperOrigin-RevId: 740699944
2025-03-26 03:29:43 -07:00
Googler
598ec00c5d DolbyVisionConfig: Move file to the container library.
To support the Dolby Vision codec in the Muxer, which needs an instance of `DolbyVisionConfig`, move the `DolbyVisionConfig` file from the extractor library to the container library.

PiperOrigin-RevId: 740689000
2025-03-26 02:43:37 -07:00
kimvde
6cd15fb4b0 Remove PlaybackVideoGraphWrapper param from its listener methods
It's unclear why the listeners would need access to the
PlaybackVideoGraphWrapper as there should always be only one. Also,
it's not common practice to pass the parent object to the listener in
media3 (see Player.Listener for example).

PiperOrigin-RevId: 740686007
2025-03-26 02:32:51 -07:00
30 changed files with 98 additions and 98 deletions

View File

@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package androidx.media3.extractor;
+package androidx.media3.container;
 
 import androidx.annotation.Nullable;
 import androidx.media3.common.util.ParsableByteArray;

View File

@@ -117,6 +117,9 @@ public abstract class Mp4Box {
   @SuppressWarnings("ConstantCaseForConstants")
   public static final int TYPE_dvvC = 0x64767643;
 
+  @SuppressWarnings("ConstantCaseForConstants")
+  public static final int TYPE_dvwC = 0x64767743;
+
   @SuppressWarnings("ConstantCaseForConstants")
   public static final int TYPE_s263 = 0x73323633;
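
Note: the value of the new constant is just the box's four-character code packed into a big-endian int. A minimal illustrative sketch (not part of this change) that derives it:

// Illustrative only: derives an Mp4Box-style constant from a four-character box code.
public final class FourCcExample {
  private FourCcExample() {}

  /** Packs four ASCII characters into a big-endian 32-bit value. */
  public static int fourCc(String code) {
    return (code.charAt(0) << 24) | (code.charAt(1) << 16) | (code.charAt(2) << 8) | code.charAt(3);
  }

  public static void main(String[] args) {
    // Prints 64767743, matching TYPE_dvwC above.
    System.out.println(Integer.toHexString(fourCc("dvwC")));
  }
}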

View File

@@ -220,7 +220,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   @Override
   public boolean handleInputFrame(
-      long framePresentationTimeUs, boolean isLastFrame, VideoFrameHandler videoFrameHandler) {
+      long framePresentationTimeUs, VideoFrameHandler videoFrameHandler) {
     videoFrameHandlers.add(videoFrameHandler);
     long bufferPresentationTimeUs = framePresentationTimeUs - bufferTimestampAdjustmentUs;
     videoFrameRenderControl.onFrameAvailableForRendering(bufferPresentationTimeUs);
@@ -276,14 +276,14 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
             .setHeight(videoSize.height)
             .setSampleMimeType(MimeTypes.VIDEO_RAW)
             .build();
-    listenerExecutor.execute(() -> listener.onVideoSizeChanged(DefaultVideoSink.this, videoSize));
+    listenerExecutor.execute(() -> listener.onVideoSizeChanged(videoSize));
   }
 
   @Override
   public void renderFrame(
       long renderTimeNs, long bufferPresentationTimeUs, boolean isFirstFrame) {
     if (isFirstFrame && outputSurface != null) {
-      listenerExecutor.execute(() -> listener.onFirstFrameRendered(DefaultVideoSink.this));
+      listenerExecutor.execute(() -> listener.onFirstFrameRendered());
     }
     // TODO - b/292111083: outputFormat is initialized after the first frame is rendered because
     // onVideoSizeChanged is announced after the first frame is available for rendering.
@@ -298,7 +298,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
   @Override
   public void dropFrame() {
-    listenerExecutor.execute(() -> listener.onFrameDropped(DefaultVideoSink.this));
+    listenerExecutor.execute(() -> listener.onFrameDropped());
     videoFrameHandlers.remove().skip();
   }
 }

View File

@@ -884,14 +884,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
     videoSink.setListener(
         new VideoSink.Listener() {
           @Override
-          public void onFirstFrameRendered(VideoSink videoSink) {
+          public void onFirstFrameRendered() {
             if (displaySurface != null) {
               notifyRenderedFirstFrame();
             }
           }
 
           @Override
-          public void onFrameDropped(VideoSink videoSink) {
+          public void onFrameDropped() {
             if (displaySurface != null) {
               updateDroppedBufferCounters(
                   /* droppedInputBufferCount= */ 0, /* droppedDecoderBufferCount= */ 1);
@@ -899,15 +899,14 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
           }
 
           @Override
-          public void onVideoSizeChanged(VideoSink videoSink, VideoSize videoSize) {
+          public void onVideoSizeChanged(VideoSize videoSize) {
             // TODO: b/292111083 - Report video size change to app. Video size reporting is
             // removed at the moment to ensure the first frame is rendered, and the video is
             // rendered after switching on/off the screen.
           }
 
           @Override
-          public void onError(
-              VideoSink videoSink, VideoSink.VideoSinkException videoSinkException) {
+          public void onError(VideoSink.VideoSinkException videoSinkException) {
             setPendingPlaybackException(
                 createRendererException(
                     videoSinkException,
@@ -1742,7 +1741,6 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer
     long framePresentationTimeUs = bufferPresentationTimeUs + getBufferTimestampAdjustmentUs();
     return videoSink.handleInputFrame(
         framePresentationTimeUs,
-        isLastBuffer,
         new VideoSink.VideoFrameHandler() {
           @Override
           public void render(long renderTimestampNs) {

View File

@@ -80,43 +80,26 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
   /** Listener for {@link PlaybackVideoGraphWrapper} events. */
   public interface Listener {
-    /**
-     * Called when the video frame processor renders the first frame.
-     *
-     * @param playbackVideoGraphWrapper The {@link PlaybackVideoGraphWrapper} which triggered this
-     *     event.
-     */
-    void onFirstFrameRendered(PlaybackVideoGraphWrapper playbackVideoGraphWrapper);
+    /** Called when the video frame processor renders the first frame. */
+    void onFirstFrameRendered();
 
-    /**
-     * Called when the video frame processor dropped a frame.
-     *
-     * @param playbackVideoGraphWrapper The {@link PlaybackVideoGraphWrapper} which triggered this
-     *     event.
-     */
-    void onFrameDropped(PlaybackVideoGraphWrapper playbackVideoGraphWrapper);
+    /** Called when the video frame processor dropped a frame. */
+    void onFrameDropped();
 
     /**
      * Called before a frame is rendered for the first time since setting the surface, and each time
      * there's a change in the size, rotation or pixel aspect ratio of the video being rendered.
      *
-     * @param playbackVideoGraphWrapper The {@link PlaybackVideoGraphWrapper} which triggered this
-     *     event.
      * @param videoSize The video size.
      */
-    void onVideoSizeChanged(
-        PlaybackVideoGraphWrapper playbackVideoGraphWrapper, VideoSize videoSize);
+    void onVideoSizeChanged(VideoSize videoSize);
 
     /**
      * Called when the video frame processor encountered an error.
      *
-     * @param playbackVideoGraphWrapper The {@link PlaybackVideoGraphWrapper} which triggered this
-     *     event.
      * @param videoFrameProcessingException The error.
      */
-    void onError(
-        PlaybackVideoGraphWrapper playbackVideoGraphWrapper,
-        VideoFrameProcessingException videoFrameProcessingException);
+    void onError(VideoFrameProcessingException videoFrameProcessingException);
   }
 
   /** A builder for {@link PlaybackVideoGraphWrapper} instances. */
@@ -474,10 +457,10 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
       outputStreamFirstFrameReleaseInstruction = streamChangeInfo.firstFrameReleaseInstruction;
       onOutputStreamChanged();
     }
+    defaultVideoSink.handleInputFrame(framePresentationTimeUs, videoFrameHandler);
     boolean isLastFrame =
         finalBufferPresentationTimeUs != C.TIME_UNSET
             && bufferPresentationTimeUs >= finalBufferPresentationTimeUs;
-    defaultVideoSink.handleInputFrame(framePresentationTimeUs, isLastFrame, videoFrameHandler);
     if (isLastFrame) {
       // TODO b/257464707 - Support extensively modified media.
       defaultVideoSink.signalEndOfCurrentInputStream();
@@ -493,7 +476,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
   @Override
   public void onError(VideoFrameProcessingException exception) {
     for (PlaybackVideoGraphWrapper.Listener listener : listeners) {
-      listener.onError(/* playbackVideoGraphWrapper= */ this, exception);
+      listener.onError(exception);
     }
   }
@@ -797,9 +780,6 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
         @FirstFrameReleaseInstruction int firstFrameReleaseInstruction,
         List<Effect> videoEffects) {
       checkState(isInitialized());
-      if (inputType != INPUT_TYPE_SURFACE && inputType != INPUT_TYPE_BITMAP) {
-        throw new UnsupportedOperationException("Unsupported input type " + inputType);
-      }
       setPendingVideoEffects(videoEffects);
       this.inputType = inputType;
       this.inputFormat = format;
@@ -915,7 +895,7 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
     @Override
     public boolean handleInputFrame(
-        long framePresentationTimeUs, boolean isLastFrame, VideoFrameHandler videoFrameHandler) {
+        long framePresentationTimeUs, VideoFrameHandler videoFrameHandler) {
       checkState(isInitialized());
       if (!shouldRenderToInputVideoSink()) {
@@ -987,35 +967,29 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
     // PlaybackVideoGraphWrapper.Listener implementation
 
     @Override
-    public void onFirstFrameRendered(PlaybackVideoGraphWrapper playbackVideoGraphWrapper) {
+    public void onFirstFrameRendered() {
       VideoSink.Listener currentListener = listener;
-      listenerExecutor.execute(() -> currentListener.onFirstFrameRendered(/* videoSink= */ this));
+      listenerExecutor.execute(currentListener::onFirstFrameRendered);
     }
 
     @Override
-    public void onFrameDropped(PlaybackVideoGraphWrapper playbackVideoGraphWrapper) {
+    public void onFrameDropped() {
       VideoSink.Listener currentListener = listener;
-      listenerExecutor.execute(
-          () -> currentListener.onFrameDropped(checkStateNotNull(/* reference= */ this)));
+      listenerExecutor.execute(currentListener::onFrameDropped);
     }
 
     @Override
-    public void onVideoSizeChanged(
-        PlaybackVideoGraphWrapper playbackVideoGraphWrapper, VideoSize videoSize) {
+    public void onVideoSizeChanged(VideoSize videoSize) {
       VideoSink.Listener currentListener = listener;
-      listenerExecutor.execute(
-          () -> currentListener.onVideoSizeChanged(/* videoSink= */ this, videoSize));
+      listenerExecutor.execute(() -> currentListener.onVideoSizeChanged(videoSize));
     }
 
     @Override
-    public void onError(
-        PlaybackVideoGraphWrapper playbackVideoGraphWrapper,
-        VideoFrameProcessingException videoFrameProcessingException) {
+    public void onError(VideoFrameProcessingException videoFrameProcessingException) {
       VideoSink.Listener currentListener = listener;
       listenerExecutor.execute(
           () ->
              currentListener.onError(
-                  /* videoSink= */ this,
                  new VideoSinkException(
                      videoFrameProcessingException, checkStateNotNull(this.inputFormat))));
     }
@@ -1045,40 +1019,48 @@ public final class PlaybackVideoGraphWrapper implements VideoSinkProvider, Video
              .buildUpon()
              .setColorInfo(getAdjustedInputColorInfo(inputFormat.colorInfo))
              .build();
+      @VideoFrameProcessor.InputType
+      int videoGraphInputType =
+          inputType == INPUT_TYPE_SURFACE
+              ? VideoFrameProcessor.INPUT_TYPE_SURFACE
+              : VideoFrameProcessor.INPUT_TYPE_BITMAP;
       checkNotNull(videoGraph)
           .registerInputStream(
-              inputIndex, inputType, adjustedInputFormat, videoEffects, /* offsetToAddUs= */ 0);
+              inputIndex,
+              videoGraphInputType,
+              adjustedInputFormat,
+              videoEffects,
+              /* offsetToAddUs= */ 0);
     }
   }
 
   private final class DefaultVideoSinkListener implements VideoSink.Listener {
 
     @Override
-    public void onFirstFrameRendered(VideoSink videoSink) {
+    public void onFirstFrameRendered() {
       for (PlaybackVideoGraphWrapper.Listener listener : listeners) {
-        listener.onFirstFrameRendered(PlaybackVideoGraphWrapper.this);
+        listener.onFirstFrameRendered();
       }
     }
 
     @Override
-    public void onFrameDropped(VideoSink videoSink) {
+    public void onFrameDropped() {
       for (PlaybackVideoGraphWrapper.Listener listener : listeners) {
-        listener.onFrameDropped(PlaybackVideoGraphWrapper.this);
+        listener.onFrameDropped();
      }
    }
 
    @Override
-    public void onVideoSizeChanged(VideoSink videoSink, VideoSize videoSize) {
+    public void onVideoSizeChanged(VideoSize videoSize) {
      for (PlaybackVideoGraphWrapper.Listener listener : listeners) {
-        listener.onVideoSizeChanged(PlaybackVideoGraphWrapper.this, videoSize);
+        listener.onVideoSizeChanged(videoSize);
      }
    }
 
    @Override
-    public void onError(VideoSink videoSink, VideoSink.VideoSinkException videoSinkException) {
+    public void onError(VideoSink.VideoSinkException videoSinkException) {
      for (PlaybackVideoGraphWrapper.Listener listener : listeners) {
-        listener.onError(
-            PlaybackVideoGraphWrapper.this, VideoFrameProcessingException.from(videoSinkException));
+        listener.onError(VideoFrameProcessingException.from(videoSinkException));
      }
    }
  }
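
Note on the "fix inputType discrepancy" hunk above: the VideoSink-level input type constant was previously forwarded to VideoGraph.registerInputStream unchanged, even though that call expects a VideoFrameProcessor input type, so the value is now converted first. A minimal sketch of that conversion, assuming the constant names used in the surrounding code:

// Sketch only (mirrors the diff above); assumes the VideoSink and VideoFrameProcessor
// input type constants referenced in the surrounding code.
@VideoFrameProcessor.InputType
private static int toVideoFrameProcessorInputType(int videoSinkInputType) {
  return videoSinkInputType == VideoSink.INPUT_TYPE_SURFACE
      ? VideoFrameProcessor.INPUT_TYPE_SURFACE
      : VideoFrameProcessor.INPUT_TYPE_BITMAP;
}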

View File

@@ -57,36 +57,23 @@ public interface VideoSink {
   /** Listener for {@link VideoSink} events. */
   interface Listener {
     /** Called when the sink renders the first frame on the output surface. */
-    void onFirstFrameRendered(VideoSink videoSink);
+    default void onFirstFrameRendered() {}
 
     /** Called when the sink dropped a frame. */
-    void onFrameDropped(VideoSink videoSink);
+    default void onFrameDropped() {}
 
     /**
      * Called before a frame is rendered for the first time after setting the output surface, and
      * each time there's a change in the size, rotation or pixel aspect ratio of the video being
      * rendered.
      */
-    void onVideoSizeChanged(VideoSink videoSink, VideoSize videoSize);
+    default void onVideoSizeChanged(VideoSize videoSize) {}
 
     /** Called when the {@link VideoSink} encountered an error. */
-    void onError(VideoSink videoSink, VideoSinkException videoSinkException);
+    default void onError(VideoSinkException videoSinkException) {}
 
     /** A no-op listener implementation. */
-    Listener NO_OP =
-        new Listener() {
-          @Override
-          public void onFirstFrameRendered(VideoSink videoSink) {}
-
-          @Override
-          public void onFrameDropped(VideoSink videoSink) {}
-
-          @Override
-          public void onVideoSizeChanged(VideoSink videoSink, VideoSize videoSize) {}
-
-          @Override
-          public void onError(VideoSink videoSink, VideoSinkException videoSinkException) {}
-        };
+    Listener NO_OP = new Listener() {};
   }
@@ -288,15 +275,11 @@ public interface VideoSink {
    *     Format, long, int, List) signaled}.
    *
    * @param framePresentationTimeUs The frame's presentation time, in microseconds.
-   * @param isLastFrame Whether this is the last frame of the video stream. This flag is set on a
-   *     best effort basis, and any logic relying on it should degrade gracefully to handle cases
-   *     where it's not set.
    * @param videoFrameHandler The {@link VideoFrameHandler} used to handle the input frame.
    * @return Whether the frame was handled successfully. If {@code false}, the caller can try again
    *     later.
    */
-  boolean handleInputFrame(
-      long framePresentationTimeUs, boolean isLastFrame, VideoFrameHandler videoFrameHandler);
+  boolean handleInputFrame(long framePresentationTimeUs, VideoFrameHandler videoFrameHandler);
 
   /**
    * Handles an input {@link Bitmap}.
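
Because every Listener method above now has a default no-op body, implementations only override the callbacks they care about, and Listener.NO_OP reduces to an empty anonymous class. A hypothetical caller-side sketch (class name and logging are illustrative):

import androidx.media3.common.VideoSize;
import androidx.media3.exoplayer.video.VideoSink;

// Hypothetical example: overrides a single callback and inherits the no-op defaults for the rest.
final class SizeLoggingVideoSinkListener implements VideoSink.Listener {
  @Override
  public void onVideoSizeChanged(VideoSize videoSize) {
    System.out.println("Video size changed to " + videoSize.width + "x" + videoSize.height);
  }
}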

View File

@@ -41,11 +41,11 @@ import androidx.media3.common.util.NullableType;
 import androidx.media3.common.util.ParsableByteArray;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
+import androidx.media3.container.DolbyVisionConfig;
 import androidx.media3.container.NalUnitUtil;
 import androidx.media3.extractor.AacUtil;
 import androidx.media3.extractor.AvcConfig;
 import androidx.media3.extractor.ChunkIndex;
-import androidx.media3.extractor.DolbyVisionConfig;
 import androidx.media3.extractor.Extractor;
 import androidx.media3.extractor.ExtractorInput;
 import androidx.media3.extractor.ExtractorOutput;

View File

@@ -36,6 +36,7 @@ import androidx.media3.common.util.ParsableBitArray;
 import androidx.media3.common.util.ParsableByteArray;
 import androidx.media3.common.util.UnstableApi;
 import androidx.media3.common.util.Util;
+import androidx.media3.container.DolbyVisionConfig;
 import androidx.media3.container.Mp4Box;
 import androidx.media3.container.Mp4Box.LeafBox;
 import androidx.media3.container.Mp4LocationData;
@@ -45,7 +46,6 @@ import androidx.media3.extractor.AacUtil;
 import androidx.media3.extractor.Ac3Util;
 import androidx.media3.extractor.Ac4Util;
 import androidx.media3.extractor.AvcConfig;
-import androidx.media3.extractor.DolbyVisionConfig;
 import androidx.media3.extractor.ExtractorUtil;
 import androidx.media3.extractor.GaplessInfoHolder;
 import androidx.media3.extractor.HevcConfig;
@@ -1367,7 +1367,24 @@ public final class BoxParser {
                     : C.STEREO_MODE_INTERLEAVED_LEFT_PRIMARY;
           }
         }
-      } else if (childAtomType == Mp4Box.TYPE_dvcC || childAtomType == Mp4Box.TYPE_dvvC) {
+      } else if (childAtomType == Mp4Box.TYPE_dvcC
+          || childAtomType == Mp4Box.TYPE_dvvC
+          || childAtomType == Mp4Box.TYPE_dvwC) {
+        int childAtomBodySize = childAtomSize - Mp4Box.HEADER_SIZE;
+        byte[] initializationDataChunk = new byte[childAtomBodySize];
+        parent.readBytes(initializationDataChunk, /* offset= */ 0, childAtomBodySize);
+        // Add the initialization data of Dolby Vision to the existing list of initialization data.
+        if (initializationData != null) {
+          initializationData =
+              ImmutableList.<byte[]>builder()
+                  .addAll(initializationData)
+                  .add(initializationDataChunk)
+                  .build();
+        } else {
+          ExtractorUtil.checkContainerInput(
+              false, "initializationData must already be set from hvcC or avcC atom");
+        }
+        parent.setPosition(childStartPosition + Mp4Box.HEADER_SIZE);
         @Nullable DolbyVisionConfig dolbyVisionConfig = DolbyVisionConfig.parse(parent);
         if (dolbyVisionConfig != null) {
           codecs = dolbyVisionConfig.codecs;
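
The appended payload is what surfaces as the extra `data = length 24` entry in the extractor dump files below. A small illustrative helper mirroring the merge pattern above (the class and method names are hypothetical):

import com.google.common.collect.ImmutableList;
import java.util.List;

// Illustrative only: keeps the existing hvcC/avcC initialization data first and
// appends the Dolby Vision configuration payload, as in the BoxParser change above.
final class InitializationDataUtil {
  private InitializationDataUtil() {}

  static List<byte[]> withDolbyVisionChunk(List<byte[]> existing, byte[] dolbyVisionChunk) {
    return ImmutableList.<byte[]>builder().addAll(existing).add(dolbyVisionChunk).build();
  }
}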

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = -455000
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = 611666
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = 1680000
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = 1680000
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = -455000
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = 611666
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = 1680000
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = 1680000
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = -455000
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[Mp4Timestamp: creation time=3788952614, modification time=3788952614, timescale=600]
   initializationData:
     data = length 97, hash 32FB3D18
+    data = length 24, hash A31E9935
   sample 0:
     time = -455000
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -32,6 +32,7 @@ track 0:
   metadata = entries=[mdta: key=com.apple.quicktime.location.accuracy.horizontal, value=3.754789, mdta: key=com.apple.quicktime.location.ISO6709, value=+37.7450-122.4301+066.374/, mdta: key=com.apple.quicktime.make, value=Apple, mdta: key=com.apple.quicktime.model, value=iPhone 12 Pro Max, mdta: key=com.apple.quicktime.software, value=14.5.1, mdta: key=com.apple.quicktime.creationdate, value=2021-05-25T09:21:51-0700, Mp4Timestamp: creation time=3704804511, modification time=3704804511, timescale=600]
   initializationData:
     data = length 526, hash 7B3FC433
+    data = length 24, hash A31E9935
   sample 0:
     time = 0
     flags = 1

View File

@@ -234,9 +234,9 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
    */
   @Override
   public boolean handleInputFrame(
-      long framePresentationTimeUs, boolean isLastFrame, VideoFrameHandler videoFrameHandler) {
+      long framePresentationTimeUs, VideoFrameHandler videoFrameHandler) {
     return videoSink != null
-        && videoSink.handleInputFrame(framePresentationTimeUs, isLastFrame, videoFrameHandler);
+        && videoSink.handleInputFrame(framePresentationTimeUs, videoFrameHandler);
   }
 
   /**

View File

@@ -408,7 +408,7 @@ public final class CompositionPlayer extends SimpleBasePlayer
   // PlaybackVideoGraphWrapper.Listener methods. Called on playback thread.
 
   @Override
-  public void onFirstFrameRendered(PlaybackVideoGraphWrapper playbackVideoGraphWrapper) {
+  public void onFirstFrameRendered() {
     applicationHandler.post(
         () -> {
           CompositionPlayer.this.renderedFirstFrame = true;
@@ -417,21 +417,18 @@ public final class CompositionPlayer extends SimpleBasePlayer
   }
 
   @Override
-  public void onFrameDropped(PlaybackVideoGraphWrapper playbackVideoGraphWrapper) {
+  public void onFrameDropped() {
     // Do not post to application thread on each dropped frame, because onFrameDropped
     // may be called frequently when resources are already scarce.
   }
 
   @Override
-  public void onVideoSizeChanged(
-      PlaybackVideoGraphWrapper playbackVideoGraphWrapper, VideoSize videoSize) {
+  public void onVideoSizeChanged(VideoSize videoSize) {
     // TODO: b/328219481 - Report video size change to app.
   }
 
   @Override
-  public void onError(
-      PlaybackVideoGraphWrapper playbackVideoGraphWrapper,
-      VideoFrameProcessingException videoFrameProcessingException) {
+  public void onError(VideoFrameProcessingException videoFrameProcessingException) {
     // The error will also be surfaced from the underlying ExoPlayer instance via
     // PlayerListener.onPlayerError, and it will arrive to the composition player twice.
     applicationHandler.post(