parent 38f2413290
commit 3b9ca40b05
@@ -316,8 +316,9 @@ public final class FrameworkSampleSource implements SampleSource, SampleSourceRe
     long durationUs = format.containsKey(android.media.MediaFormat.KEY_DURATION)
         ? format.getLong(android.media.MediaFormat.KEY_DURATION) : C.UNKNOWN_TIME_US;
     return new MediaFormat(mimeType, maxInputSize, durationUs, width, height, rotationDegrees,
-        MediaFormat.NO_VALUE, channelCount, sampleRate, language, initializationData,
-        false, MediaFormat.NO_VALUE, MediaFormat.NO_VALUE);
+        MediaFormat.NO_VALUE, channelCount, sampleRate, language,
+        MediaFormat.OFFSET_SAMPLE_RELATIVE, initializationData, false, MediaFormat.NO_VALUE,
+        MediaFormat.NO_VALUE);
   }

   @TargetApi(16)
@@ -33,6 +33,12 @@ public final class MediaFormat {

   public static final int NO_VALUE = -1;

+  /**
+   * A value for {@link #subsampleOffsetUs} to indicate that subsample timestamps are relative to
+   * the timestamps of their parent samples.
+   */
+  public static final long OFFSET_SAMPLE_RELATIVE = Long.MAX_VALUE;
+
   /**
    * The mime type of the format.
    */
@@ -112,6 +118,13 @@ public final class MediaFormat {
    */
   public final String language;

+  /**
+   * For samples that contain subsamples, this is an offset that should be added to subsample
+   * timestamps. A value of {@link #OFFSET_SAMPLE_RELATIVE} indicates that subsample timestamps are
+   * relative to the timestamps of their parent samples.
+   */
+  public final long subsampleOffsetUs;
+
   // Lazy-initialized hashcode and framework media format.

   private int hashCode;
@@ -134,8 +147,8 @@ public final class MediaFormat {
       int width, int height, int rotationDegrees, float pixelWidthHeightRatio,
       List<byte[]> initializationData) {
     return new MediaFormat(mimeType, maxInputSize, durationUs, width, height, rotationDegrees,
-        pixelWidthHeightRatio, NO_VALUE, NO_VALUE, null, initializationData, false, NO_VALUE,
-        NO_VALUE);
+        pixelWidthHeightRatio, NO_VALUE, NO_VALUE, null, OFFSET_SAMPLE_RELATIVE, initializationData,
+        false, NO_VALUE, NO_VALUE);
   }

   public static MediaFormat createAudioFormat(String mimeType, int maxInputSize, int channelCount,
@@ -147,8 +160,8 @@ public final class MediaFormat {
   public static MediaFormat createAudioFormat(String mimeType, int maxInputSize, long durationUs,
       int channelCount, int sampleRate, List<byte[]> initializationData) {
     return new MediaFormat(mimeType, maxInputSize, durationUs, NO_VALUE, NO_VALUE, NO_VALUE,
-        NO_VALUE, channelCount, sampleRate, null, initializationData, false, NO_VALUE,
-        NO_VALUE);
+        NO_VALUE, channelCount, sampleRate, null, OFFSET_SAMPLE_RELATIVE, initializationData,
+        false, NO_VALUE, NO_VALUE);
   }

   public static MediaFormat createTextFormat(String mimeType, String language) {
@@ -156,8 +169,13 @@ public final class MediaFormat {
   }

   public static MediaFormat createTextFormat(String mimeType, String language, long durationUs) {
-    return new MediaFormat(mimeType, NO_VALUE, durationUs, NO_VALUE, NO_VALUE, NO_VALUE,
-        NO_VALUE, NO_VALUE, NO_VALUE, language, null, false, NO_VALUE, NO_VALUE);
+    return createTextFormat(mimeType, language, durationUs, OFFSET_SAMPLE_RELATIVE);
   }
+
+  public static MediaFormat createTextFormat(String mimeType, String language, long durationUs,
+      long subsampleOffsetUs) {
+    return new MediaFormat(mimeType, NO_VALUE, durationUs, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
+        NO_VALUE, NO_VALUE, language, subsampleOffsetUs, null, false, NO_VALUE, NO_VALUE);
+  }

   public static MediaFormat createFormatForMimeType(String mimeType) {
@@ -165,8 +183,8 @@ public final class MediaFormat {
   }

   public static MediaFormat createFormatForMimeType(String mimeType, long durationUs) {
-    return new MediaFormat(mimeType, NO_VALUE, durationUs, NO_VALUE, NO_VALUE, NO_VALUE,
-        NO_VALUE, NO_VALUE, NO_VALUE, null, null, false, NO_VALUE, NO_VALUE);
+    return new MediaFormat(mimeType, NO_VALUE, durationUs, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
+        NO_VALUE, NO_VALUE, null, OFFSET_SAMPLE_RELATIVE, null, false, NO_VALUE, NO_VALUE);
   }

   public static MediaFormat createAdaptiveFormat(String mimeType) {
@@ -175,13 +193,13 @@ public final class MediaFormat {

   public static MediaFormat createAdaptiveFormat(String mimeType, long durationUs) {
     return new MediaFormat(mimeType, NO_VALUE, durationUs, NO_VALUE, NO_VALUE, NO_VALUE,
-        NO_VALUE, NO_VALUE, NO_VALUE, null, null, true, NO_VALUE, NO_VALUE);
+        NO_VALUE, NO_VALUE, NO_VALUE, null, OFFSET_SAMPLE_RELATIVE, null, true, NO_VALUE, NO_VALUE);
   }

   /* package */ MediaFormat(String mimeType, int maxInputSize, long durationUs, int width,
       int height, int rotationDegrees, float pixelWidthHeightRatio, int channelCount,
-      int sampleRate, String language, List<byte[]> initializationData, boolean adaptive,
-      int maxWidth, int maxHeight) {
+      int sampleRate, String language, long subsampleOffsetUs, List<byte[]> initializationData,
+      boolean adaptive, int maxWidth, int maxHeight) {
     this.mimeType = Assertions.checkNotEmpty(mimeType);
     this.maxInputSize = maxInputSize;
     this.durationUs = durationUs;
@@ -192,6 +210,7 @@ public final class MediaFormat {
     this.channelCount = channelCount;
     this.sampleRate = sampleRate;
     this.language = language;
+    this.subsampleOffsetUs = subsampleOffsetUs;
     this.initializationData = initializationData == null ? Collections.<byte[]>emptyList()
         : initializationData;
     this.adaptive = adaptive;
@@ -199,16 +218,22 @@ public final class MediaFormat {
     this.maxHeight = maxHeight;
   }

-  public MediaFormat copyWithMaxVideoDimension(int maxWidth, int maxHeight) {
+  public MediaFormat copyWithMaxVideoDimensions(int maxWidth, int maxHeight) {
     return new MediaFormat(mimeType, maxInputSize, durationUs, width, height, rotationDegrees,
-        pixelWidthHeightRatio, channelCount, sampleRate, language, initializationData, false,
-        maxWidth, maxHeight);
+        pixelWidthHeightRatio, channelCount, sampleRate, language, subsampleOffsetUs,
+        initializationData, adaptive, maxWidth, maxHeight);
   }
+
+  public MediaFormat copyWithSubsampleOffsetUs(long subsampleOffsetUs) {
+    return new MediaFormat(mimeType, maxInputSize, durationUs, width, height, rotationDegrees,
+        pixelWidthHeightRatio, channelCount, sampleRate, language, subsampleOffsetUs,
+        initializationData, adaptive, maxWidth, maxHeight);
+  }

   public MediaFormat copyWithDurationUs(long durationUs) {
     return new MediaFormat(mimeType, maxInputSize, durationUs, width, height, rotationDegrees,
-        pixelWidthHeightRatio, channelCount, sampleRate, language, initializationData, false,
-        maxWidth, maxHeight);
+        pixelWidthHeightRatio, channelCount, sampleRate, language, subsampleOffsetUs,
+        initializationData, adaptive, maxWidth, maxHeight);
   }

   /**
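Note: the MediaFormat changes above add a single subsampleOffsetUs field that carries the timing mode for subsamples (e.g. cues inside a subtitle sample). A hedged consumer-side sketch of how the two modes are meant to be read; resolveSubsampleTimeUs, sampleTimeUs and subsampleTimeUs are illustrative names, not part of the diff:

  // Resolves a subsample timestamp to a presentation time, assuming subsampleTimeUs was
  // read from the sample payload and sampleTimeUs is the parent sample's presentation time.
  static long resolveSubsampleTimeUs(MediaFormat format, long sampleTimeUs, long subsampleTimeUs) {
    if (format.subsampleOffsetUs == MediaFormat.OFFSET_SAMPLE_RELATIVE) {
      // Subsample timestamps are relative to their parent sample.
      return sampleTimeUs + subsampleTimeUs;
    }
    // Subsample timestamps are absolute, shifted by the declared offset.
    return subsampleTimeUs + format.subsampleOffsetUs;
  }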
@@ -69,7 +69,7 @@ public class ContainerMediaChunk extends BaseMediaChunk implements SingleTrackOu
         isMediaFormatFinal);
     this.extractorWrapper = extractorWrapper;
     this.sampleOffsetUs = sampleOffsetUs;
-    this.mediaFormat = mediaFormat;
+    this.mediaFormat = getAdjustedMediaFormat(mediaFormat, sampleOffsetUs);
     this.drmInitData = drmInitData;
   }

@@ -102,7 +102,7 @@ public class ContainerMediaChunk extends BaseMediaChunk implements SingleTrackOu

   @Override
   public final void format(MediaFormat mediaFormat) {
-    this.mediaFormat = mediaFormat;
+    this.mediaFormat = getAdjustedMediaFormat(mediaFormat, sampleOffsetUs);
   }

   @Override
@@ -160,4 +160,13 @@ public class ContainerMediaChunk extends BaseMediaChunk implements SingleTrackOu
     }
   }

+  // Private methods.
+
+  private static MediaFormat getAdjustedMediaFormat(MediaFormat format, long sampleOffsetUs) {
+    if (sampleOffsetUs != 0 && format.subsampleOffsetUs != MediaFormat.OFFSET_SAMPLE_RELATIVE) {
+      return format.copyWithSubsampleOffsetUs(format.subsampleOffsetUs + sampleOffsetUs);
+    }
+    return format;
+  }
+
 }
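Note: getAdjustedMediaFormat only folds the chunk's sampleOffsetUs into the format when the format declares absolute subsample timing; relative formats pass through untouched. A small illustration with invented values, using the createTextFormat overloads added above:

  // Invented values, for illustration only.
  MediaFormat absolute = MediaFormat.createTextFormat(
      MimeTypes.APPLICATION_TTML, "en", 30000000L, 0L /* absolute subsample timing */);
  MediaFormat relative = MediaFormat.createTextFormat(
      MimeTypes.APPLICATION_TTML, "en", 30000000L);  // defaults to OFFSET_SAMPLE_RELATIVE

  // With sampleOffsetUs == 5000000:
  //   getAdjustedMediaFormat(absolute, 5000000) -> copyWithSubsampleOffsetUs(0 + 5000000),
  //     so downstream consumers shift cue times by 5 seconds.
  //   getAdjustedMediaFormat(relative, 5000000) -> returned unchanged, because cue times
  //     already track their parent sample.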
@@ -263,7 +263,7 @@ public class DashChunkSource implements ChunkSource {
     long periodDurationUs = (representations[0].periodDurationMs == TrackRenderer.UNKNOWN_TIME_US)
         ? TrackRenderer.UNKNOWN_TIME_US : representations[0].periodDurationMs * 1000;
     // TODO: Remove this and pass proper formats instead (b/22996976).
-    this.mediaFormat = MediaFormat.createFormatForMimeType(representations[0].format.mimeType,
+    this.mediaFormat = MediaFormat.createFormatForMimeType(getMediaMimeType(representations[0]),
         periodDurationUs);

     this.formats = new Format[representations.length];
@@ -287,7 +287,7 @@ public class DashChunkSource implements ChunkSource {
   @Override
   public final MediaFormat getWithMaxVideoDimensions(MediaFormat format) {
     return MimeTypes.isVideo(mediaFormat.mimeType)
-        ? format.copyWithMaxVideoDimension(maxWidth, maxHeight) : format;
+        ? format.copyWithMaxVideoDimensions(maxWidth, maxHeight) : format;
   }

   @Override
@@ -670,6 +670,16 @@ public class DashChunkSource implements ChunkSource {
     }
   }

+  private static String getMediaMimeType(Representation representation) {
+    String mimeType = representation.format.mimeType;
+    if (MimeTypes.APPLICATION_MP4.equals(representation.format.mimeType)
+        && "stpp".equals(representation.format.codecs)) {
+      return MimeTypes.APPLICATION_TTML;
+    }
+    // TODO: Use codecs to determine media mime type for other formats too.
+    return mimeType;
+  }
+
   private static Representation[] getFilteredRepresentations(MediaPresentationDescription manifest,
       int adaptationSetIndex, int[] representationIndices) {
     AdaptationSet adaptationSet = manifest.periods.get(0).adaptationSets.get(adaptationSetIndex);
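Note: DASH manifests signal fragmented-MP4 TTML subtitles as an application/mp4 representation with codecs="stpp", so getMediaMimeType remaps the container mime type before the renderer-facing MediaFormat is created. A standalone sketch of the same decision, using literal strings (the assumed values of MimeTypes.APPLICATION_MP4 and MimeTypes.APPLICATION_TTML) and omitting the Representation type:

  static String mediaMimeTypeFor(String containerMimeType, String codecs) {
    if ("application/mp4".equals(containerMimeType) && "stpp".equals(codecs)) {
      return "application/ttml+xml";  // TTML subtitles carried in fragmented MP4.
    }
    // As the TODO in the diff notes, codecs could resolve other container mime types too.
    return containerMimeType;
  }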
@@ -102,6 +102,7 @@ import java.util.List;
   public static final int TYPE_stco = Util.getIntegerCodeForString("stco");
   public static final int TYPE_co64 = Util.getIntegerCodeForString("co64");
   public static final int TYPE_tx3g = Util.getIntegerCodeForString("tx3g");
+  public static final int TYPE_stpp = Util.getIntegerCodeForString("stpp");

   public final int type;

@@ -45,8 +45,8 @@ import java.util.List;
   public static Track parseTrak(Atom.ContainerAtom trak, Atom.LeafAtom mvhd) {
     Atom.ContainerAtom mdia = trak.getContainerAtomOfType(Atom.TYPE_mdia);
     int trackType = parseHdlr(mdia.getLeafAtomOfType(Atom.TYPE_hdlr).data);
-    if (trackType != Track.TYPE_AUDIO && trackType != Track.TYPE_VIDEO
-        && trackType != Track.TYPE_TEXT && trackType != Track.TYPE_SUBTITLE) {
+    if (trackType != Track.TYPE_soun && trackType != Track.TYPE_vide && trackType != Track.TYPE_text
+        && trackType != Track.TYPE_sbtl && trackType != Track.TYPE_subt) {
       return null;
     }

@@ -381,6 +381,9 @@ import java.util.List;
       } else if (childAtomType == Atom.TYPE_tx3g) {
         out.mediaFormat = MediaFormat.createTextFormat(MimeTypes.APPLICATION_TX3G, language,
             durationUs);
+      } else if (childAtomType == Atom.TYPE_stpp) {
+        out.mediaFormat = MediaFormat.createTextFormat(MimeTypes.APPLICATION_TTML, language,
+            durationUs, 0 /* subsample timing is absolute */);
       }
       stsd.setPosition(childStartPosition + childAtomSize);
     }
@@ -474,7 +474,7 @@ public final class FragmentedMp4Extractor implements Extractor {

     long timescale = track.timescale;
     long cumulativeTime = decodeTime;
-    boolean workaroundEveryVideoFrameIsSyncFrame = track.type == Track.TYPE_VIDEO
+    boolean workaroundEveryVideoFrameIsSyncFrame = track.type == Track.TYPE_vide
         && ((workaroundFlags & WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME)
         == WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
     for (int i = 0; i < sampleCount; i++) {
@@ -24,22 +24,11 @@ import com.google.android.exoplayer.util.Util;
  */
 public final class Track {

-  /**
-   * Type of a video track.
-   */
-  public static final int TYPE_VIDEO = Util.getIntegerCodeForString("vide");
-  /**
-   * Type of an audio track.
-   */
-  public static final int TYPE_AUDIO = Util.getIntegerCodeForString("soun");
-  /**
-   * Type of a text track.
-   */
-  public static final int TYPE_TEXT = Util.getIntegerCodeForString("text");
-  /**
-   * Type of a subtitle track.
-   */
-  public static final int TYPE_SUBTITLE = Util.getIntegerCodeForString("sbtl");
+  public static final int TYPE_vide = Util.getIntegerCodeForString("vide");
+  public static final int TYPE_soun = Util.getIntegerCodeForString("soun");
+  public static final int TYPE_text = Util.getIntegerCodeForString("text");
+  public static final int TYPE_sbtl = Util.getIntegerCodeForString("sbtl");
+  public static final int TYPE_subt = Util.getIntegerCodeForString("subt");

   /**
    * The track identifier.
@@ -47,7 +36,8 @@ public final class Track {
   public final int id;

   /**
-   * One of {@link #TYPE_VIDEO}, {@link #TYPE_AUDIO}, {@link #TYPE_TEXT} and {@link #TYPE_SUBTITLE}.
+   * One of {@link #TYPE_vide}, {@link #TYPE_soun}, {@link #TYPE_text}, {@link #TYPE_sbtl} and
+   * {@link #TYPE_subt}.
    */
   public final int type;

@@ -62,7 +52,7 @@ public final class Track {
   public final long durationUs;

   /**
-   * The format if {@link #type} is {@link #TYPE_VIDEO} or {@link #TYPE_AUDIO}. Null otherwise.
+   * The media format.
    */
   public final MediaFormat mediaFormat;

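Note: the renamed Track.TYPE_* constants now match the four-character handler types read from the hdlr box ("vide", "soun", "text", "sbtl", "subt"), so parseHdlr's result can be compared against them directly. Util.getIntegerCodeForString is assumed to pack the characters into an int, most significant byte first; a sketch of that idea (fourCc is an illustrative name, not the library method):

  // "vide" -> ('v' << 24) | ('i' << 16) | ('d' << 8) | 'e', matching the value stored in hdlr.
  static int fourCc(String code) {
    int result = 0;
    for (int i = 0; i < code.length(); i++) {
      result = (result << 8) | code.charAt(i);
    }
    return result;
  }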
@@ -234,7 +234,7 @@ public class HlsChunkSource {
    */
   public MediaFormat getMaxVideoDimensions(MediaFormat format) {
     return (maxWidth == -1 || maxHeight == -1) ? format
-        : format.copyWithMaxVideoDimension(maxWidth, maxHeight);
+        : format.copyWithMaxVideoDimensions(maxWidth, maxHeight);
   }

   /**
@@ -165,13 +165,14 @@ public class SmoothStreamingChunkSource implements ChunkSource {
       maxHeight = Math.max(maxHeight, formats[i].height);

       MediaFormat mediaFormat = getMediaFormat(streamElement, trackIndex);
-      int trackType = streamElement.type == StreamElement.TYPE_VIDEO ? Track.TYPE_VIDEO
-          : Track.TYPE_AUDIO;
+      int trackType = streamElement.type == StreamElement.TYPE_VIDEO ? Track.TYPE_vide
+          : streamElement.type == StreamElement.TYPE_AUDIO ? Track.TYPE_soun
+          : Track.TYPE_text;
       FragmentedMp4Extractor extractor = new FragmentedMp4Extractor(
           FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
       extractor.setTrack(new Track(trackIndex, trackType, streamElement.timescale,
           initialManifest.durationUs, mediaFormat, trackEncryptionBoxes,
-          trackType == Track.TYPE_VIDEO ? 4 : -1));
+          trackType == Track.TYPE_vide ? 4 : -1));
       extractorWrappers.put(trackIndex, new ChunkExtractorWrapper(extractor));
       mediaFormats.put(trackIndex, mediaFormat);
     }
@@ -183,7 +184,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
   @Override
   public final MediaFormat getWithMaxVideoDimensions(MediaFormat format) {
     return MimeTypes.isVideo(mediaFormat.mimeType)
-        ? format.copyWithMaxVideoDimension(maxWidth, maxHeight) : format;
+        ? format.copyWithMaxVideoDimensions(maxWidth, maxHeight) : format;
   }

   @Override
@@ -32,19 +32,19 @@ import java.util.List;
   public final long startTimeUs;

   private final Subtitle subtitle;
+  private final long offsetUs;

   /**
-   * @param startTimeUs The start time of the subtitle.
    * @param subtitle The subtitle to wrap.
+   * @param isRelative True if the wrapped subtitle's timestamps are relative to the start time.
+   *     False if they are absolute.
+   * @param startTimeUs The start time of the subtitle.
+   * @param offsetUs An offset to add to the subtitle timestamps.
    */
-  public PlayableSubtitle(long startTimeUs, Subtitle subtitle) {
-    this.startTimeUs = startTimeUs;
+  public PlayableSubtitle(Subtitle subtitle, boolean isRelative, long startTimeUs, long offsetUs) {
     this.subtitle = subtitle;
-  }
-
-  @Override
-  public int getNextEventTimeIndex(long timeUs) {
-    return subtitle.getNextEventTimeIndex(timeUs - startTimeUs);
+    this.startTimeUs = startTimeUs;
+    this.offsetUs = (isRelative ? startTimeUs : 0) + offsetUs;
   }

   @Override
@@ -54,17 +54,22 @@ import java.util.List;

   @Override
   public long getEventTime(int index) {
-    return subtitle.getEventTime(index) + startTimeUs;
+    return subtitle.getEventTime(index) + offsetUs;
   }

   @Override
   public long getLastEventTime() {
-    return subtitle.getLastEventTime() + startTimeUs;
+    return subtitle.getLastEventTime() + offsetUs;
   }

+  @Override
+  public int getNextEventTimeIndex(long timeUs) {
+    return subtitle.getNextEventTimeIndex(timeUs - offsetUs);
+  }
+
   @Override
   public List<Cue> getCues(long timeUs) {
-    return subtitle.getCues(timeUs - startTimeUs);
+    return subtitle.getCues(timeUs - offsetUs);
   }

 }
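Note: PlayableSubtitle now collapses both timing modes into one additive offsetUs: for relative subtitles it is startTimeUs plus the format-level offset, for absolute ones just the format-level offset. A worked example with invented times in microseconds, assuming package-level access and a parsed Subtitle whose first cue sits at 2000000 in its own timebase:

  void illustrateOffsets(Subtitle parsedSubtitle) {
    // Relative timestamps (e.g. tx3g): the parent sample starts at 10000000.
    PlayableSubtitle relative = new PlayableSubtitle(parsedSubtitle, true, 10000000L, 0L);
    // relative.getEventTime(0) -> 2000000 + 10000000 = 12000000.

    // Absolute timestamps (e.g. stpp) in a chunk whose samples are shifted by 5000000.
    PlayableSubtitle absolute = new PlayableSubtitle(parsedSubtitle, false, 10000000L, 5000000L);
    // absolute.getEventTime(0) -> 2000000 + 5000000 = 7000000.
  }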
@@ -15,6 +15,7 @@
  */
 package com.google.android.exoplayer.text;

+import com.google.android.exoplayer.MediaFormat;
 import com.google.android.exoplayer.SampleHolder;
 import com.google.android.exoplayer.util.Assertions;
 import com.google.android.exoplayer.util.Util;
@@ -34,14 +35,20 @@ import java.io.InputStream;
  */
 /* package */ final class SubtitleParserHelper implements Handler.Callback {

-  private final SubtitleParser parser;
+  private static final int MSG_FORMAT = 0;
+  private static final int MSG_SAMPLE = 1;
+
+  private final SubtitleParser parser;
   private final Handler handler;

   private SampleHolder sampleHolder;
   private boolean parsing;
   private PlayableSubtitle result;
   private IOException error;

+  private boolean subtitlesAreRelative;
+  private long subtitleOffsetUs;
+
   /**
    * @param looper The {@link Looper} associated with the thread on which parsing should occur.
    * @param parser The parser that should be used to parse the raw data.
@@ -84,6 +91,15 @@ import java.io.InputStream;
     return sampleHolder;
   }

+  /**
+   * Sets the format of subsequent samples.
+   *
+   * @param format The format.
+   */
+  public void setFormat(MediaFormat format) {
+    handler.obtainMessage(MSG_FORMAT, format).sendToTarget();
+  }
+
   /**
    * Start a parsing operation.
    * <p>
@@ -95,7 +111,7 @@ import java.io.InputStream;
     parsing = true;
     result = null;
     error = null;
-    handler.obtainMessage(0, Util.getTopInt(sampleHolder.timeUs),
+    handler.obtainMessage(MSG_SAMPLE, Util.getTopInt(sampleHolder.timeUs),
         Util.getBottomInt(sampleHolder.timeUs), sampleHolder).sendToTarget();
   }

@@ -122,8 +138,25 @@ import java.io.InputStream;

   @Override
   public boolean handleMessage(Message msg) {
-    long sampleTimeUs = Util.getLong(msg.arg1, msg.arg2);
-    SampleHolder holder = (SampleHolder) msg.obj;
+    switch (msg.what) {
+      case MSG_FORMAT:
+        handleFormat((MediaFormat) msg.obj);
+        break;
+      case MSG_SAMPLE:
+        long sampleTimeUs = Util.getLong(msg.arg1, msg.arg2);
+        SampleHolder holder = (SampleHolder) msg.obj;
+        handleSample(sampleTimeUs, holder);
+        break;
+    }
+    return true;
+  }
+
+  private void handleFormat(MediaFormat format) {
+    subtitlesAreRelative = format.subsampleOffsetUs == MediaFormat.OFFSET_SAMPLE_RELATIVE;
+    subtitleOffsetUs = subtitlesAreRelative ? 0 : format.subsampleOffsetUs;
+  }
+
+  private void handleSample(long sampleTimeUs, SampleHolder holder) {
     Subtitle parsedSubtitle = null;
     IOException error = null;
     try {
@@ -136,12 +169,12 @@ import java.io.InputStream;
       if (sampleHolder != holder) {
         // A flush has occurred since this holder was posted. Do nothing.
       } else {
-        this.result = new PlayableSubtitle(sampleTimeUs, parsedSubtitle);
+        this.result = new PlayableSubtitle(parsedSubtitle, subtitlesAreRelative, sampleTimeUs,
+            subtitleOffsetUs);
         this.error = error;
         this.parsing = false;
       }
     }
-    return true;
   }

 }
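Note: SubtitleParserHelper now runs a small two-message protocol on the parsing thread: MSG_FORMAT updates subtitlesAreRelative/subtitleOffsetUs for all later samples, and MSG_SAMPLE parses one sample with that state applied when the PlayableSubtitle is built. A rough sketch of the caller-side ordering (the renderer drives this in practice; feedHelper is an illustrative wrapper, not part of the diff):

  void feedHelper(SubtitleParserHelper parserHelper, MediaFormat mediaFormat) {
    parserHelper.setFormat(mediaFormat);    // posts MSG_FORMAT -> handleFormat(...)
    // ... fill parserHelper.getSampleHolder() with one subtitle sample ...
    parserHelper.startParseOperation();     // posts MSG_SAMPLE -> handleSample(...)
    // The parsed PlayableSubtitle is later collected by the renderer, carrying the
    // subtitlesAreRelative / subtitleOffsetUs captured from the most recent format.
  }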
@@ -226,7 +226,9 @@ public final class TextTrackRenderer extends SampleSourceTrackRenderer implement
       SampleHolder sampleHolder = parserHelper.getSampleHolder();
       sampleHolder.clearData();
       int result = readSource(positionUs, formatHolder, sampleHolder, false);
-      if (result == SampleSource.SAMPLE_READ) {
+      if (result == SampleSource.FORMAT_READ) {
+        parserHelper.setFormat(formatHolder.format);
+      } else if (result == SampleSource.SAMPLE_READ) {
         parserHelper.startParseOperation();
       } else if (result == SampleSource.END_OF_STREAM) {
         inputStreamEnded = true;