Mirror of https://github.com/androidx/media.git (synced 2025-05-03 21:57:46 +08:00)
Commit c42b265e65

README.md
@@ -55,7 +55,7 @@ accompanying demo application. To get started:
 
 ## Using Gradle ##
 
-ExoPlayer can also be built using Gradle. You can include it as a dependent project and build from source. e.g.
+ExoPlayer can also be built using Gradle. You can include it as a dependent project and build from source:
 
 ```
 // settings.gradle
@@ -74,3 +74,11 @@ If you want to use ExoPlayer as a jar, run:
 ```
 
 and copy library.jar to the libs-folder of your new project.
+
+The project is also available on [jCenter](https://bintray.com/google/exoplayer/exoplayer/view):
+
+```
+compile 'com.google.android.exoplayer:exoplayer:rX.X.X'
+```
+
+Where `rX.X.X` should be replaced with the desired version.
@@ -1,5 +1,17 @@
 # Release notes #
 
+### Current dev branch (from r1.3.3) ###
+
+* Add option to TsExtractor to allow non-IDR keyframes.
+* Added MulticastDataSource for connecting to multicast streams.
+* (WorkInProgress) - First steps to supporting seeking in DASH DVR window.
+* (WorkInProgress) - First steps to supporting styled + positioned subtitles.
+
+### r1.3.3 ###
+
+* HLS: Fix failure when playing HLS AAC streams.
+* Misc bug fixes.
+
 ### r1.3.2 ###
 
 * DataSource improvements: `DefaultUriDataSource` now handles http://, https://, file://, asset://
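Editor's note: the DVR-window item above is wired up through the new `InfoListener.onSeekRangeChanged(TimeRange)` callback and the `TimeRange` class added later in this commit. The following is a hedged, hypothetical sketch (not part of the commit) of how an application might clamp a requested seek position to the most recently reported range; only `TimeRange` and `getCurrentBoundsMs` come from this diff.

```java
import com.google.android.exoplayer.TimeRange;

/** Hypothetical helper that keeps user seeks inside the reported DVR window. */
public final class SeekRangeClamper {

  private long[] boundsMs = new long[2];
  private TimeRange seekRange;

  /** Store the latest range; call this from an InfoListener#onSeekRangeChanged. */
  public void onSeekRangeChanged(TimeRange seekRange) {
    this.seekRange = seekRange;
  }

  /** Returns positionMs clamped to the last reported seekable range, if any. */
  public long clamp(long positionMs) {
    if (seekRange == null) {
      return positionMs;
    }
    // Reusing the array avoids an allocation per call, as EventLogger does in this diff.
    boundsMs = seekRange.getCurrentBoundsMs(boundsMs);
    return Math.max(boundsMs[0], Math.min(positionMs, boundsMs[1]));
  }
}
```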
@@ -16,8 +16,8 @@
 
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
 package="com.google.android.exoplayer.demo"
-android:versionCode="1302"
+android:versionCode="1303"
-android:versionName="1.3.2"
+android:versionName="1.3.3"
 android:theme="@style/RootTheme">
 
 <uses-permission android:name="android.permission.INTERNET"/>
@@ -17,6 +17,7 @@ package com.google.android.exoplayer.demo;
 
 import com.google.android.exoplayer.ExoPlayer;
 import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
+import com.google.android.exoplayer.TimeRange;
 import com.google.android.exoplayer.audio.AudioTrack;
 import com.google.android.exoplayer.chunk.Format;
 import com.google.android.exoplayer.demo.player.DemoPlayer;
@@ -46,6 +47,7 @@ public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener
 
 private long sessionStartTimeMs;
 private long[] loadStartTimeMs;
+private long[] seekRangeValuesUs;
 
 public EventLogger() {
 loadStartTimeMs = new long[DemoPlayer.RENDERER_COUNT];
@@ -163,7 +165,14 @@ public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener
 @Override
 public void onDecoderInitialized(String decoderName, long elapsedRealtimeMs,
 long initializationDurationMs) {
-Log.d(TAG, "decoderInitialized [" + getSessionTimeString() + "]");
+Log.d(TAG, "decoderInitialized [" + getSessionTimeString() + ", " + decoderName + "]");
+}
+
+@Override
+public void onSeekRangeChanged(TimeRange seekRange) {
+seekRangeValuesUs = seekRange.getCurrentBoundsUs(seekRangeValuesUs);
+Log.d(TAG, "seekRange [ " + seekRange.type + ", " + seekRangeValuesUs[0] + ", "
++ seekRangeValuesUs[1] + "]");
 }
 
 private void printInternalError(String type, Exception e) {
@@ -35,7 +35,8 @@ import com.google.android.exoplayer.metadata.GeobMetadata;
 import com.google.android.exoplayer.metadata.PrivMetadata;
 import com.google.android.exoplayer.metadata.TxxxMetadata;
 import com.google.android.exoplayer.text.CaptionStyleCompat;
-import com.google.android.exoplayer.text.SubtitleView;
+import com.google.android.exoplayer.text.Cue;
+import com.google.android.exoplayer.text.SubtitleLayout;
 import com.google.android.exoplayer.util.Util;
 import com.google.android.exoplayer.util.VerboseLogUtil;
 
@@ -43,12 +44,10 @@ import android.annotation.TargetApi;
 import android.app.Activity;
 import android.content.Context;
 import android.content.Intent;
-import android.graphics.Point;
 import android.net.Uri;
 import android.os.Bundle;
 import android.text.TextUtils;
 import android.util.Log;
-import android.view.Display;
 import android.view.KeyEvent;
 import android.view.Menu;
 import android.view.MenuItem;
@@ -58,7 +57,6 @@ import android.view.View;
 import android.view.View.OnClickListener;
 import android.view.View.OnKeyListener;
 import android.view.View.OnTouchListener;
-import android.view.WindowManager;
 import android.view.accessibility.CaptioningManager;
 import android.widget.Button;
 import android.widget.MediaController;
@@ -67,13 +65,14 @@ import android.widget.PopupMenu.OnMenuItemClickListener;
 import android.widget.TextView;
 import android.widget.Toast;
 
+import java.util.List;
 import java.util.Map;
 
 /**
 * An activity that plays media using {@link DemoPlayer}.
 */
 public class PlayerActivity extends Activity implements SurfaceHolder.Callback, OnClickListener,
-DemoPlayer.Listener, DemoPlayer.TextListener, DemoPlayer.Id3MetadataListener,
+DemoPlayer.Listener, DemoPlayer.CaptionListener, DemoPlayer.Id3MetadataListener,
 AudioCapabilitiesReceiver.Listener {
 
 public static final String CONTENT_TYPE_EXTRA = "content_type";
@@ -81,7 +80,6 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
 
 private static final String TAG = "PlayerActivity";
 
-private static final float CAPTION_LINE_HEIGHT_RATIO = 0.0533f;
 private static final int MENU_GROUP_TRACKS = 1;
 private static final int ID_OFFSET = 2;
 
@@ -92,7 +90,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
 private VideoSurfaceView surfaceView;
 private TextView debugTextView;
 private TextView playerStateTextView;
-private SubtitleView subtitleView;
+private SubtitleLayout subtitleLayout;
 private Button videoButton;
 private Button audioButton;
 private Button textButton;
@@ -154,7 +152,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
 debugTextView = (TextView) findViewById(R.id.debug_text_view);
 
 playerStateTextView = (TextView) findViewById(R.id.player_state_view);
-subtitleView = (SubtitleView) findViewById(R.id.subtitles);
+subtitleLayout = (SubtitleLayout) findViewById(R.id.subtitles);
 
 mediaController = new MediaController(this);
 mediaController.setAnchorView(root);
@@ -256,7 +254,7 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
 if (player == null) {
 player = new DemoPlayer(getRendererBuilder());
 player.addListener(this);
-player.setTextListener(this);
+player.setCaptionListener(this);
 player.setMetadataListener(this);
 player.seekTo(playerPosition);
 playerNeedsPrepare = true;
@@ -464,16 +462,11 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
 debugRootView.setVisibility(View.VISIBLE);
 }
 
-// DemoPlayer.TextListener implementation
+// DemoPlayer.CaptionListener implementation
 
 @Override
-public void onText(String text) {
+public void onCues(List<Cue> cues) {
-if (TextUtils.isEmpty(text)) {
+subtitleLayout.setCues(cues);
-subtitleView.setVisibility(View.INVISIBLE);
-} else {
-subtitleView.setVisibility(View.VISIBLE);
-subtitleView.setText(text);
-}
 }
 
 // DemoPlayer.MetadataListener implementation
@@ -523,24 +516,16 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
 
 private void configureSubtitleView() {
 CaptionStyleCompat captionStyle;
-float captionTextSize = getCaptionFontSize();
+float captionFontScale;
 if (Util.SDK_INT >= 19) {
 captionStyle = getUserCaptionStyleV19();
-captionTextSize *= getUserCaptionFontScaleV19();
+captionFontScale = getUserCaptionFontScaleV19();
 } else {
 captionStyle = CaptionStyleCompat.DEFAULT;
+captionFontScale = 1.0f;
 }
-subtitleView.setStyle(captionStyle);
+subtitleLayout.setStyle(captionStyle);
-subtitleView.setTextSize(captionTextSize);
+subtitleLayout.setFontScale(captionFontScale);
-}
-
-private float getCaptionFontSize() {
-Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE))
-.getDefaultDisplay();
-Point displaySize = new Point();
-display.getSize(displaySize);
-return Math.max(getResources().getDimension(R.dimen.subtitle_minimum_font_size),
-CAPTION_LINE_HEIGHT_RATIO * Math.min(displaySize.x, displaySize.y));
 }
 
 @TargetApi(19)
@@ -235,14 +235,15 @@ public class DashRendererBuilder implements RendererBuilder,
 DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
 ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
 videoAdaptationSetIndex, videoRepresentationIndices, videoDataSource,
-new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset);
+new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
+mainHandler, player);
 ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
 VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
 DemoPlayer.TYPE_VIDEO);
 videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
 MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
 debugRenderer = debugTextView != null
-? new DebugTrackRenderer(debugTextView, player, videoRenderer) : null;
+? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
 }
 
 // Build the audio chunk sources.
@@ -259,7 +260,7 @@ public class DashRendererBuilder implements RendererBuilder,
 format.audioSamplingRate + "Hz)");
 audioChunkSourceList.add(new DashChunkSource(manifestFetcher, audioAdaptationSetIndex,
 new int[] {i}, audioDataSource, audioEvaluator, LIVE_EDGE_LATENCY_MS,
-elapsedRealtimeOffset));
+elapsedRealtimeOffset, mainHandler, player));
 codecs.add(format.codecs);
 }
 
@@ -316,7 +317,8 @@ public class DashRendererBuilder implements RendererBuilder,
 Representation representation = representations.get(j);
 textTrackNameList.add(representation.format.id);
 textChunkSourceList.add(new DashChunkSource(manifestFetcher, i, new int[] {j},
-textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset));
+textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
+mainHandler, player));
 }
 }
 }
@@ -19,6 +19,7 @@ import com.google.android.exoplayer.ExoPlaybackException;
 import com.google.android.exoplayer.MediaCodecTrackRenderer;
 import com.google.android.exoplayer.TrackRenderer;
 import com.google.android.exoplayer.chunk.Format;
+import com.google.android.exoplayer.upstream.BandwidthMeter;
 
 import android.widget.TextView;
 
@@ -31,15 +32,22 @@ import android.widget.TextView;
 private final TextView textView;
 private final DemoPlayer player;
 private final MediaCodecTrackRenderer renderer;
+private final BandwidthMeter bandwidthMeter;
 
 private volatile boolean pendingFailure;
 private volatile long currentPositionUs;
 
 public DebugTrackRenderer(TextView textView, DemoPlayer player,
 MediaCodecTrackRenderer renderer) {
+this(textView, player, renderer, null);
+}
+
+public DebugTrackRenderer(TextView textView, DemoPlayer player, MediaCodecTrackRenderer renderer,
+BandwidthMeter bandwidthMeter) {
 this.textView = textView;
 this.player = player;
 this.renderer = renderer;
+this.bandwidthMeter = bandwidthMeter;
 }
 
 public void injectFailure() {
@@ -77,7 +85,12 @@ import android.widget.TextView;
 }
 
 private String getRenderString() {
-return getQualityString() + " " + renderer.codecCounters.getDebugString();
+return getTimeString() + " " + getQualityString() + " " + getBandwidthString() + " "
++ renderer.codecCounters.getDebugString();
+}
+
+private String getTimeString() {
+return "ms(" + (currentPositionUs / 1000) + ")";
 }
 
 private String getQualityString() {
@@ -86,6 +99,15 @@ import android.widget.TextView;
 : "id:" + format.id + " br:" + format.bitrate + " h:" + format.height;
 }
 
+private String getBandwidthString() {
+if (bandwidthMeter == null
+|| bandwidthMeter.getBitrateEstimate() == BandwidthMeter.NO_ESTIMATE) {
+return "bw:?";
+} else {
+return "bw:" + (bandwidthMeter.getBitrateEstimate() / 1000);
+}
+}
+
 @Override
 protected long getCurrentPositionUs() {
 return currentPositionUs;
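Editor's note: the bandwidth string above relies on a `BandwidthMeter` that the renderer builders now pass in. Below is a minimal, hypothetical sketch of that pattern, assuming the same `DefaultBandwidthMeter(Handler, EventListener)` construction used by `ExtractorRendererBuilder` later in this diff; the helper class and its names are illustrative only, not part of this commit.

```java
import com.google.android.exoplayer.upstream.BandwidthMeter;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;

import android.os.Handler;

/** Hypothetical helper mirroring how DebugTrackRenderer formats the bandwidth estimate. */
public final class BandwidthDebugHelper {

  private final BandwidthMeter bandwidthMeter;

  public BandwidthDebugHelper(Handler eventHandler) {
    // Same construction pattern as ExtractorRendererBuilder in this diff; the
    // listener argument may be null when event delivery is not required.
    bandwidthMeter = new DefaultBandwidthMeter(eventHandler, null);
  }

  /** Returns "bw:?" until an estimate exists, then the estimate in kbit/s. */
  public String describeBandwidth() {
    long estimate = bandwidthMeter.getBitrateEstimate();
    return estimate == BandwidthMeter.NO_ESTIMATE ? "bw:?" : "bw:" + (estimate / 1000);
  }
}
```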
@@ -21,14 +21,17 @@ import com.google.android.exoplayer.ExoPlayer;
 import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
 import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
 import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
+import com.google.android.exoplayer.TimeRange;
 import com.google.android.exoplayer.TrackRenderer;
 import com.google.android.exoplayer.audio.AudioTrack;
 import com.google.android.exoplayer.chunk.ChunkSampleSource;
 import com.google.android.exoplayer.chunk.Format;
 import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
+import com.google.android.exoplayer.dash.DashChunkSource;
 import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
 import com.google.android.exoplayer.hls.HlsSampleSource;
 import com.google.android.exoplayer.metadata.MetadataTrackRenderer;
+import com.google.android.exoplayer.text.Cue;
 import com.google.android.exoplayer.text.TextRenderer;
 import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
 import com.google.android.exoplayer.util.PlayerControl;
@@ -39,6 +42,8 @@ import android.os.Looper;
 import android.view.Surface;
 
 import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CopyOnWriteArrayList;
 
@@ -50,7 +55,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
 public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventListener,
 HlsSampleSource.EventListener, DefaultBandwidthMeter.EventListener,
 MediaCodecVideoTrackRenderer.EventListener, MediaCodecAudioTrackRenderer.EventListener,
-StreamingDrmSessionManager.EventListener, TextRenderer {
+StreamingDrmSessionManager.EventListener, DashChunkSource.EventListener, TextRenderer {
 
 /**
 * Builds renderers for the player.
@@ -132,13 +137,14 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
 int mediaStartTimeMs, int mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs);
 void onDecoderInitialized(String decoderName, long elapsedRealtimeMs,
 long initializationDurationMs);
+void onSeekRangeChanged(TimeRange seekRange);
 }
 
 /**
 * A listener for receiving notifications of timed text.
 */
-public interface TextListener {
+public interface CaptionListener {
-void onText(String text);
+void onCues(List<Cue> cues);
 }
 
 /**
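Editor's note: to make the rename above concrete, here is a hedged sketch of a caption consumer using the new interface, in the spirit of the `PlayerActivity` changes elsewhere in this diff. The `CaptionBinder` class is hypothetical; only `DemoPlayer.setCaptionListener`, `onCues(List<Cue>)`, and `SubtitleLayout.setCues` come from this commit.

```java
import com.google.android.exoplayer.demo.player.DemoPlayer;
import com.google.android.exoplayer.text.Cue;
import com.google.android.exoplayer.text.SubtitleLayout;

import java.util.List;

/** Hypothetical caption consumer wired to DemoPlayer's new CaptionListener. */
public final class CaptionBinder implements DemoPlayer.CaptionListener {

  private final SubtitleLayout subtitleLayout;

  public CaptionBinder(DemoPlayer player, SubtitleLayout subtitleLayout) {
    this.subtitleLayout = subtitleLayout;
    // Replaces the old setTextListener/onText(String) pairing removed in this commit.
    player.setCaptionListener(this);
  }

  @Override
  public void onCues(List<Cue> cues) {
    // SubtitleLayout renders styled and positioned cues directly; no manual
    // visibility toggling is needed, matching the simplified PlayerActivity.onCues.
    subtitleLayout.setCues(cues);
  }
}
```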
@@ -190,7 +196,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
 private int[] selectedTracks;
 private boolean backgrounded;
 
-private TextListener textListener;
+private CaptionListener captionListener;
 private Id3MetadataListener id3MetadataListener;
 private InternalErrorListener internalErrorListener;
 private InfoListener infoListener;
@@ -229,8 +235,8 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
 infoListener = listener;
 }
 
-public void setTextListener(TextListener listener) {
+public void setCaptionListener(CaptionListener listener) {
-textListener = listener;
+captionListener = listener;
 }
 
 public void setMetadataListener(Id3MetadataListener listener) {
@@ -265,8 +271,8 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
 }
 selectedTracks[type] = index;
 pushTrackSelection(type, true);
-if (type == TYPE_TEXT && index == DISABLED_TRACK && textListener != null) {
+if (type == TYPE_TEXT && index == DISABLED_TRACK && captionListener != null) {
-textListener.onText(null);
+captionListener.onCues(Collections.<Cue>emptyList());
 }
 }
 
@@ -506,8 +512,15 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
 }
 
 @Override
-public void onText(String text) {
+public void onCues(List<Cue> cues) {
-processText(text);
+processCues(cues);
+}
+
+@Override
+public void onSeekRangeChanged(TimeRange seekRange) {
+if (infoListener != null) {
+infoListener.onSeekRangeChanged(seekRange);
+}
 }
 
 /* package */ MetadataTrackRenderer.MetadataRenderer<Map<String, Object>>
@@ -607,11 +620,11 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
 }
 }
 
-/* package */ void processText(String text) {
+/* package */ void processCues(List<Cue> cues) {
-if (textListener == null || selectedTracks[TYPE_TEXT] == DISABLED_TRACK) {
+if (captionListener == null || selectedTracks[TYPE_TEXT] == DISABLED_TRACK) {
 return;
 }
-textListener.onText(text);
+captionListener.onCues(cues);
 }
 
 private class InternalRendererBuilderCallback implements RendererBuilderCallback {
@@ -23,6 +23,7 @@ import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallba
 import com.google.android.exoplayer.extractor.Extractor;
 import com.google.android.exoplayer.extractor.ExtractorSampleSource;
 import com.google.android.exoplayer.upstream.DataSource;
+import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
 import com.google.android.exoplayer.upstream.DefaultUriDataSource;
 
 import android.content.Context;
@@ -55,7 +56,9 @@ public class ExtractorRendererBuilder implements RendererBuilder {
 @Override
 public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
 // Build the video and audio renderers.
-DataSource dataSource = new DefaultUriDataSource(context, userAgent);
+DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(player.getMainHandler(),
+null);
+DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
 ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, extractor, 2,
 BUFFER_SIZE);
 MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
@@ -66,7 +69,7 @@ public class ExtractorRendererBuilder implements RendererBuilder {
 
 // Build the debug renderer.
 TrackRenderer debugRenderer = debugTextView != null
-? new DebugTrackRenderer(debugTextView, player, videoRenderer) : null;
+? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
 
 // Invoke the callback.
 TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
@@ -121,7 +121,7 @@ public class HlsRendererBuilder implements RendererBuilder, ManifestCallback<Hls
 
 // Build the debug renderer.
 TrackRenderer debugRenderer = debugTextView != null
-? new DebugTrackRenderer(debugTextView, player, videoRenderer) : null;
+? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
 
 TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
 renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
@@ -174,7 +174,7 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
 videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
 MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
 debugRenderer = debugTextView != null
-? new DebugTrackRenderer(debugTextView, player, videoRenderer) : null;
+? new DebugTrackRenderer(debugTextView, player, videoRenderer, bandwidthMeter) : null;
 }
 
 // Build the audio renderer.
@@ -26,14 +26,12 @@
 android:layout_height="match_parent"
 android:layout_gravity="center"/>
 
-<com.google.android.exoplayer.text.SubtitleView android:id="@+id/subtitles"
+<com.google.android.exoplayer.text.SubtitleLayout android:id="@+id/subtitles"
-android:layout_width="wrap_content"
+android:layout_width="match_parent"
-android:layout_height="wrap_content"
+android:layout_height="match_parent"
-android:layout_gravity="bottom|center_horizontal"
 android:layout_marginLeft="16dp"
 android:layout_marginRight="16dp"
-android:layout_marginBottom="32dp"
+android:layout_marginBottom="32dp"/>
-android:visibility="invisible"/>
 
 <View android:id="@+id/shutter"
 android:layout_width="match_parent"
@@ -74,7 +74,7 @@ publish {
 userOrg = 'google'
 groupId = 'com.google.android.exoplayer'
 artifactId = 'exoplayer'
-version = 'r1.3.2'
+version = 'r1.3.3'
 description = 'The ExoPlayer library.'
 website = 'https://github.com/google/ExoPlayer'
 }
@@ -26,7 +26,7 @@ public class ExoPlayerLibraryInfo {
 /**
 * The version of the library, expressed as a string.
 */
-public static final String VERSION = "1.3.2";
+public static final String VERSION = "1.3.3";
 
 /**
 * The version of the library, expressed as an integer.
@@ -34,7 +34,7 @@ public class ExoPlayerLibraryInfo {
 * Three digits are used for each component of {@link #VERSION}. For example "1.2.3" has the
 * corresponding integer version 001002003.
 */
-public static final int VERSION_INT = 001003002;
+public static final int VERSION_INT = 001003003;
 
 /**
 * Whether the library was compiled with {@link com.google.android.exoplayer.util.Assertions}
@@ -0,0 +1,102 @@
+/*
+* Copyright (C) 2014 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package com.google.android.exoplayer;
+
+/**
+* A container to store a start and end time in microseconds.
+*/
+public final class TimeRange {
+
+/**
+* Represents a range of time whose bounds change in bulk increments rather than smoothly over
+* time.
+*/
+public static final int TYPE_SNAPSHOT = 0;
+
+/**
+* The type of this time range.
+*/
+public final int type;
+
+private final long startTimeUs;
+private final long endTimeUs;
+
+/**
+* Create a new {@link TimeRange} of the appropriate type.
+*
+* @param type The type of the TimeRange.
+* @param startTimeUs The beginning of the TimeRange.
+* @param endTimeUs The end of the TimeRange.
+*/
+public TimeRange(int type, long startTimeUs, long endTimeUs) {
+this.type = type;
+this.startTimeUs = startTimeUs;
+this.endTimeUs = endTimeUs;
+}
+
+/**
+* Returns the start and end times (in milliseconds) of the TimeRange in the provided array,
+* or creates a new one.
+*
+* @param out An array to store the start and end times; can be null.
+* @return An array containing the start time (index 0) and end time (index 1) in milliseconds.
+*/
+public long[] getCurrentBoundsMs(long[] out) {
+out = getCurrentBoundsUs(out);
+out[0] /= 1000;
+out[1] /= 1000;
+return out;
+}
+
+/**
+* Returns the start and end times (in microseconds) of the TimeRange in the provided array,
+* or creates a new one.
+*
+* @param out An array to store the start and end times; can be null.
+* @return An array containing the start time (index 0) and end time (index 1) in microseconds.
+*/
+public long[] getCurrentBoundsUs(long[] out) {
+if (out == null || out.length < 2) {
+out = new long[2];
+}
+out[0] = startTimeUs;
+out[1] = endTimeUs;
+return out;
+}
+
+@Override
+public int hashCode() {
+int hashCode = 0;
+hashCode |= type << 30;
+hashCode |= (((startTimeUs + endTimeUs) / 1000) & 0x3FFFFFFF);
+return hashCode;
+}
+
+@Override
+public boolean equals(Object other) {
+if (other == this) {
+return true;
+}
+if (other instanceof TimeRange) {
+TimeRange otherTimeRange = (TimeRange) other;
+return (otherTimeRange.type == type) && (otherTimeRange.startTimeUs == startTimeUs)
+&& (otherTimeRange.endTimeUs == endTimeUs);
+} else {
+return false;
+}
+}
+
+}
@@ -449,7 +449,7 @@ public final class AudioTrack {
 // This is the first time we've seen this {@code buffer}.
 // Note: presentationTimeUs corresponds to the end of the sample, not the start.
 long bufferStartTime = presentationTimeUs - framesToDurationUs(bytesToFrames(size));
-if (startMediaTimeUs == START_NOT_SET) {
+if (startMediaTimeState == START_NOT_SET) {
 startMediaTimeUs = Math.max(0, bufferStartTime);
 startMediaTimeState = START_IN_SYNC;
 } else {
@@ -573,7 +573,7 @@ public final class AudioTrack {
 if (isInitialized()) {
 submittedBytes = 0;
 temporaryBufferSize = 0;
-startMediaTimeUs = START_NOT_SET;
+startMediaTimeState = START_NOT_SET;
 latencyUs = 0;
 resetSyncParams();
 int playState = audioTrack.getPlayState();
@@ -623,7 +623,7 @@ public final class AudioTrack {
 
 /** Returns whether {@link #getCurrentPositionUs} can return the current playback position. */
 private boolean hasCurrentPositionUs() {
-return isInitialized() && startMediaTimeUs != START_NOT_SET;
+return isInitialized() && startMediaTimeState != START_NOT_SET;
 }
 
 /** Updates the audio track latency and playback position parameters. */
@@ -67,6 +67,10 @@ public abstract class Chunk implements Loadable {
 * Value of {@link #trigger} for a load triggered by an adaptive format selection.
 */
 public static final int TRIGGER_ADAPTIVE = 3;
+/**
+* Value of {@link #trigger} for a load triggered whilst in a trick play mode.
+*/
+public static final int TRIGGER_TRICK_PLAY = 4;
 /**
 * Implementations may define custom {@link #trigger} codes greater than or equal to this value.
 */
@@ -17,6 +17,7 @@ package com.google.android.exoplayer.dash;
 
 import com.google.android.exoplayer.BehindLiveWindowException;
 import com.google.android.exoplayer.MediaFormat;
+import com.google.android.exoplayer.TimeRange;
 import com.google.android.exoplayer.TrackInfo;
 import com.google.android.exoplayer.TrackRenderer;
 import com.google.android.exoplayer.chunk.Chunk;
@@ -50,6 +51,8 @@ import com.google.android.exoplayer.util.ManifestFetcher;
 import com.google.android.exoplayer.util.MimeTypes;
 import com.google.android.exoplayer.util.SystemClock;
 
+import android.os.Handler;
+
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
@@ -63,6 +66,20 @@ import java.util.List;
 */
 public class DashChunkSource implements ChunkSource {
 
+/**
+* Interface definition for a callback to be notified of {@link DashChunkSource} events.
+*/
+public interface EventListener {
+
+/**
+* Invoked when the available seek range of the stream has changed.
+*
+* @param seekRange The range which specifies available content that can be seeked to.
+*/
+public void onSeekRangeChanged(TimeRange seekRange);
+
+}
+
 /**
 * Thrown when an AdaptationSet is missing from the MPD.
 */
@@ -79,6 +96,9 @@ public class DashChunkSource implements ChunkSource {
 */
 public static final int USE_ALL_TRACKS = -1;
 
+private final Handler eventHandler;
+private final EventListener eventListener;
+
 private final TrackInfo trackInfo;
 private final DataSource dataSource;
 private final FormatEvaluator evaluator;
@@ -99,6 +119,10 @@ public class DashChunkSource implements ChunkSource {
 
 private DrmInitData drmInitData;
 private MediaPresentationDescription currentManifest;
+private TimeRange seekRange;
+private long[] seekRangeValues;
+private int firstAvailableSegmentNum;
+private int lastAvailableSegmentNum;
 private boolean finishedCurrentManifest;
 
 private boolean lastChunkWasInitialization;
@@ -142,7 +166,7 @@ public class DashChunkSource implements ChunkSource {
 public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetIndex,
 int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
 this(null, manifest, adaptationSetIndex, representationIndices, dataSource, formatEvaluator,
-new SystemClock(), 0, 0);
+new SystemClock(), 0, 0, null, null);
 }
 
 /**
@@ -167,19 +191,24 @@ public class DashChunkSource implements ChunkSource {
 * @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
 * server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
 * as the server's unix time minus the local elapsed time. It unknown, set to 0.
+* @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
+* null if delivery of events is not required.
+* @param eventListener A listener of events. May be null if delivery of events is not required.
 */
 public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
 int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
-FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs) {
+FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
+Handler eventHandler, EventListener eventListener) {
 this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
 dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
-elapsedRealtimeOffsetMs * 1000);
+elapsedRealtimeOffsetMs * 1000, eventHandler, eventListener);
 }
 
 /* package */ DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
 MediaPresentationDescription initialManifest, int adaptationSetIndex,
 int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator,
-Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs) {
+Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs,
+Handler eventHandler, EventListener eventListener) {
 this.manifestFetcher = manifestFetcher;
 this.currentManifest = initialManifest;
 this.adaptationSetIndex = adaptationSetIndex;
@@ -189,8 +218,11 @@ public class DashChunkSource implements ChunkSource {
 this.systemClock = systemClock;
 this.liveEdgeLatencyUs = liveEdgeLatencyUs;
 this.elapsedRealtimeOffsetUs = elapsedRealtimeOffsetUs;
+this.eventHandler = eventHandler;
+this.eventListener = eventListener;
 this.evaluation = new Evaluation();
 this.headerBuilder = new StringBuilder();
+this.seekRangeValues = new long[2];
 
 drmInitData = getDrmInitData(currentManifest, adaptationSetIndex);
 Representation[] representations = getFilteredRepresentations(currentManifest,
@@ -229,12 +261,27 @@ public class DashChunkSource implements ChunkSource {
 return trackInfo;
 }
 
+// VisibleForTesting
+/* package */ TimeRange getSeekRange() {
+return seekRange;
+}
+
 @Override
 public void enable() {
 evaluator.enable();
 if (manifestFetcher != null) {
 manifestFetcher.enable();
 }
+DashSegmentIndex segmentIndex =
+representationHolders.get(formats[0].id).representation.getIndex();
+if (segmentIndex == null) {
+seekRange = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, currentManifest.duration * 1000);
+notifySeekRangeChanged(seekRange);
+} else {
+long nowUs = getNowUs();
+updateAvailableSegmentBounds(segmentIndex, nowUs);
+updateSeekRange(segmentIndex, nowUs);
+}
 }
 
 @Override
@@ -243,6 +290,7 @@ public class DashChunkSource implements ChunkSource {
 if (manifestFetcher != null) {
 manifestFetcher.disable();
 }
+seekRange = null;
 }
 
 @Override
@@ -268,6 +316,10 @@ public class DashChunkSource implements ChunkSource {
 }
 currentManifest = newManifest;
 finishedCurrentManifest = false;
+
+long nowUs = getNowUs();
+updateAvailableSegmentBounds(newRepresentations[0].getIndex(), nowUs);
+updateSeekRange(newRepresentations[0].getIndex(), nowUs);
 }
 
 // TODO: This is a temporary hack to avoid constantly refreshing the MPD in cases where
@ -334,36 +386,21 @@ public class DashChunkSource implements ChunkSource {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
long nowUs;
|
|
||||||
if (elapsedRealtimeOffsetUs != 0) {
|
|
||||||
nowUs = (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
|
|
||||||
} else {
|
|
||||||
nowUs = System.currentTimeMillis() * 1000;
|
|
||||||
}
|
|
||||||
|
|
||||||
int firstAvailableSegmentNum = segmentIndex.getFirstSegmentNum();
|
|
||||||
int lastAvailableSegmentNum = segmentIndex.getLastSegmentNum();
|
|
||||||
boolean indexUnbounded = lastAvailableSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED;
|
|
||||||
if (indexUnbounded) {
|
|
||||||
// The index is itself unbounded. We need to use the current time to calculate the range of
|
|
||||||
// available segments.
|
|
||||||
long liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
|
|
||||||
if (currentManifest.timeShiftBufferDepth != -1) {
|
|
||||||
long bufferDepthUs = currentManifest.timeShiftBufferDepth * 1000;
|
|
||||||
firstAvailableSegmentNum = Math.max(firstAvailableSegmentNum,
|
|
||||||
segmentIndex.getSegmentNum(liveEdgeTimestampUs - bufferDepthUs));
|
|
||||||
}
|
|
||||||
// getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
|
|
||||||
// index of the last completed segment.
|
|
||||||
lastAvailableSegmentNum = segmentIndex.getSegmentNum(liveEdgeTimestampUs) - 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
int segmentNum;
|
int segmentNum;
|
||||||
|
boolean indexUnbounded = segmentIndex.getLastSegmentNum() == DashSegmentIndex.INDEX_UNBOUNDED;
|
||||||
       if (queue.isEmpty()) {
         if (currentManifest.dynamic) {
-          seekPositionUs = getLiveSeekPosition(nowUs, indexUnbounded, segmentIndex.isExplicit());
+          seekRangeValues = seekRange.getCurrentBoundsUs(seekRangeValues);
+          seekPositionUs = Math.max(seekPositionUs, seekRangeValues[0]);
+          seekPositionUs = Math.min(seekPositionUs, seekRangeValues[1]);
         }
         segmentNum = segmentIndex.getSegmentNum(seekPositionUs);
+        // if the index is unbounded then the result of getSegmentNum isn't clamped to ensure that
+        // it doesn't exceed the last available segment. Clamp it here.
+        if (indexUnbounded) {
+          segmentNum = Math.min(segmentNum, lastAvailableSegmentNum);
+        }
       } else {
         MediaChunk previous = queue.get(out.queueSize - 1);
         segmentNum = previous.isLastChunk ? -1

@@ -432,6 +469,59 @@ public class DashChunkSource implements ChunkSource {
     // Do nothing.
   }
 
+  private void updateAvailableSegmentBounds(DashSegmentIndex segmentIndex, long nowUs) {
+    int indexFirstAvailableSegmentNum = segmentIndex.getFirstSegmentNum();
+    int indexLastAvailableSegmentNum = segmentIndex.getLastSegmentNum();
+    if (indexLastAvailableSegmentNum == DashSegmentIndex.INDEX_UNBOUNDED) {
+      // The index is itself unbounded. We need to use the current time to calculate the range of
+      // available segments.
+      long liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
+      if (currentManifest.timeShiftBufferDepth != -1) {
+        long bufferDepthUs = currentManifest.timeShiftBufferDepth * 1000;
+        indexFirstAvailableSegmentNum = Math.max(indexFirstAvailableSegmentNum,
+            segmentIndex.getSegmentNum(liveEdgeTimestampUs - bufferDepthUs));
+      }
+      // getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
+      // index of the last completed segment.
+      indexLastAvailableSegmentNum = segmentIndex.getSegmentNum(liveEdgeTimestampUs) - 1;
+    }
+    firstAvailableSegmentNum = indexFirstAvailableSegmentNum;
+    lastAvailableSegmentNum = indexLastAvailableSegmentNum;
+  }
+
+  private void updateSeekRange(DashSegmentIndex segmentIndex, long nowUs) {
+    long earliestSeekPosition = segmentIndex.getTimeUs(firstAvailableSegmentNum);
+    long latestSeekPosition = segmentIndex.getTimeUs(lastAvailableSegmentNum)
+        + segmentIndex.getDurationUs(lastAvailableSegmentNum);
+    if (currentManifest.dynamic) {
+      long liveEdgeTimestampUs;
+      if (segmentIndex.getLastSegmentNum() == DashSegmentIndex.INDEX_UNBOUNDED) {
+        liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
+      } else {
+        liveEdgeTimestampUs = segmentIndex.getTimeUs(segmentIndex.getLastSegmentNum())
+            + segmentIndex.getDurationUs(segmentIndex.getLastSegmentNum());
+        if (!segmentIndex.isExplicit()) {
+          // Some segments defined by the index may not be available yet. Bound the calculated live
+          // edge based on the elapsed time since the manifest became available.
+          liveEdgeTimestampUs = Math.min(liveEdgeTimestampUs,
+              nowUs - currentManifest.availabilityStartTime * 1000);
+        }
+      }
+
+      // it's possible that the live edge latency actually puts our latest position before
+      // the earliest position in the case of a DVR-like stream that's just starting up, so
+      // in that case just return the earliest position instead
+      latestSeekPosition = Math.max(earliestSeekPosition, liveEdgeTimestampUs - liveEdgeLatencyUs);
+    }
+
+    TimeRange newSeekRange = new TimeRange(TimeRange.TYPE_SNAPSHOT, earliestSeekPosition,
+        latestSeekPosition);
+    if (seekRange == null || !seekRange.equals(newSeekRange)) {
+      seekRange = newSeekRange;
+      notifySeekRangeChanged(seekRange);
+    }
+  }
+
   private static boolean mimeTypeIsWebm(String mimeType) {
     return mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM);
   }
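
The hunks above derive a DVR seek window from the live edge and the manifest's time-shift buffer, then clamp any requested seek position into it. A minimal standalone sketch of that clamping step, with hypothetical names rather than ExoPlayer's own classes:

```java
// Illustrative helper, not part of ExoPlayer: derives a DVR seek window from the live edge
// and a time-shift buffer depth, then clamps a requested position into it.
final class LiveSeekWindow {
  private final long timeShiftBufferDepthUs;
  private final long liveEdgeLatencyUs;

  LiveSeekWindow(long timeShiftBufferDepthUs, long liveEdgeLatencyUs) {
    this.timeShiftBufferDepthUs = timeShiftBufferDepthUs;
    this.liveEdgeLatencyUs = liveEdgeLatencyUs;
  }

  /** Clamps {@code requestedUs} into the window implied by {@code liveEdgeUs}. */
  long clampSeekPosition(long liveEdgeUs, long requestedUs) {
    long latestUs = liveEdgeUs - liveEdgeLatencyUs; // Stay behind the live edge.
    long earliestUs = Math.max(0, liveEdgeUs - timeShiftBufferDepthUs); // Oldest DVR position.
    latestUs = Math.max(earliestUs, latestUs); // The window can collapse when a stream starts up.
    return Math.min(Math.max(requestedUs, earliestUs), latestUs);
  }
}
```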
@@ -491,36 +581,12 @@ public class DashChunkSource implements ChunkSource {
     }
   }
 
-  /**
-   * For live playbacks, determines the seek position that snaps playback to be
-   * {@link #liveEdgeLatencyUs} behind the live edge of the current manifest
-   *
-   * @param nowUs An estimate of the current server time, in microseconds.
-   * @param indexUnbounded True if the segment index for this source is unbounded. False otherwise.
-   * @param indexExplicit True if the segment index is explicit. False otherwise.
-   * @return The seek position in microseconds.
-   */
-  private long getLiveSeekPosition(long nowUs, boolean indexUnbounded, boolean indexExplicit) {
-    long liveEdgeTimestampUs;
-    if (indexUnbounded) {
-      liveEdgeTimestampUs = nowUs - currentManifest.availabilityStartTime * 1000;
+  private long getNowUs() {
+    if (elapsedRealtimeOffsetUs != 0) {
+      return (systemClock.elapsedRealtime() * 1000) + elapsedRealtimeOffsetUs;
     } else {
-      liveEdgeTimestampUs = Long.MIN_VALUE;
-      for (RepresentationHolder representationHolder : representationHolders.values()) {
-        DashSegmentIndex segmentIndex = representationHolder.segmentIndex;
-        int lastSegmentNum = segmentIndex.getLastSegmentNum();
-        long indexLiveEdgeTimestampUs = segmentIndex.getTimeUs(lastSegmentNum)
-            + segmentIndex.getDurationUs(lastSegmentNum);
-        liveEdgeTimestampUs = Math.max(liveEdgeTimestampUs, indexLiveEdgeTimestampUs);
-      }
-      if (!indexExplicit) {
-        // Some segments defined by the index may not be available yet. Bound the calculated live
-        // edge based on the elapsed time since the manifest became available.
-        liveEdgeTimestampUs = Math.min(liveEdgeTimestampUs,
-            nowUs - currentManifest.availabilityStartTime * 1000);
-      }
+      return System.currentTimeMillis() * 1000;
     }
-    return liveEdgeTimestampUs - liveEdgeLatencyUs;
   }
 
   private static Representation[] getFilteredRepresentations(MediaPresentationDescription manifest,

@@ -571,6 +637,17 @@ public class DashChunkSource implements ChunkSource {
         Collections.singletonList(period));
   }
 
+  private void notifySeekRangeChanged(final TimeRange seekRange) {
+    if (eventHandler != null && eventListener != null) {
+      eventHandler.post(new Runnable() {
+        @Override
+        public void run() {
+          eventListener.onSeekRangeChanged(seekRange);
+        }
+      });
+    }
+  }
+
   private static class RepresentationHolder {
 
     public final Representation representation;
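
The new getNowUs() prefers a monotonic clock plus a server offset over the device wall clock. A rough sketch of how such an offset might be captured and applied (hypothetical helper, not ExoPlayer code):

```java
import android.os.SystemClock;

// Capture the difference between a trusted server timestamp and the device's monotonic clock
// once, then use it to estimate "server now" later without being affected by wall-clock changes.
final class ServerClock {
  private long elapsedRealtimeOffsetUs; // serverTimeUs - elapsedRealtimeUs at capture time.

  void onServerTimestamp(long serverTimeUs) {
    elapsedRealtimeOffsetUs = serverTimeUs - SystemClock.elapsedRealtime() * 1000;
  }

  long nowUs() {
    if (elapsedRealtimeOffsetUs != 0) {
      return SystemClock.elapsedRealtime() * 1000 + elapsedRealtimeOffsetUs;
    }
    return System.currentTimeMillis() * 1000; // Fall back to the wall clock.
  }
}
```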
@@ -217,7 +217,7 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
    * Provides access to {@link MediaDrm#setPropertyString(String, String)}.
    * <p>
    * This method may be called when the manager is in any state.
    *
    * @param key The property to write.
    * @param value The value to write.
    */
@@ -37,11 +37,15 @@ import java.util.List;
 
   private static final String TAG = "H264Reader";
 
-  private static final int NAL_UNIT_TYPE_IDR = 5;
-  private static final int NAL_UNIT_TYPE_SEI = 6;
-  private static final int NAL_UNIT_TYPE_SPS = 7;
-  private static final int NAL_UNIT_TYPE_PPS = 8;
-  private static final int NAL_UNIT_TYPE_AUD = 9;
+  private static final int FRAME_TYPE_I = 2;
+  private static final int FRAME_TYPE_ALL_I = 7;
+
+  private static final int NAL_UNIT_TYPE_IFR = 1; // Coded slice of a non-IDR picture
+  private static final int NAL_UNIT_TYPE_IDR = 5; // Coded slice of an IDR picture
+  private static final int NAL_UNIT_TYPE_SEI = 6; // Supplemental enhancement information
+  private static final int NAL_UNIT_TYPE_SPS = 7; // Sequence parameter set
+  private static final int NAL_UNIT_TYPE_PPS = 8; // Picture parameter set
+  private static final int NAL_UNIT_TYPE_AUD = 9; // Access unit delimiter
   private static final int EXTENDED_SAR = 0xFF;
   private static final float[] ASPECT_RATIO_IDC_VALUES = new float[] {
     1f /* Unspecified. Assume square */,

@@ -69,6 +73,7 @@ import java.util.List;
   // State that should be reset on seek.
   private final SeiReader seiReader;
   private final boolean[] prefixFlags;
+  private final IfrParserBuffer ifrParserBuffer;
   private final NalUnitTargetBuffer sps;
   private final NalUnitTargetBuffer pps;
   private final NalUnitTargetBuffer sei;

@@ -84,10 +89,11 @@ import java.util.List;
   private final ParsableByteArray seiWrapper;
   private int[] scratchEscapePositions;
 
-  public H264Reader(TrackOutput output, SeiReader seiReader) {
+  public H264Reader(TrackOutput output, SeiReader seiReader, boolean idrKeyframesOnly) {
     super(output);
     this.seiReader = seiReader;
     prefixFlags = new boolean[3];
+    ifrParserBuffer = (idrKeyframesOnly) ? null : new IfrParserBuffer();
     sps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SPS, 128);
     pps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_PPS, 128);
     sei = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SEI, 128);

@@ -102,6 +108,9 @@ import java.util.List;
     sps.reset();
     pps.reset();
     sei.reset();
+    if (ifrParserBuffer != null) {
+      ifrParserBuffer.reset();
+    }
     writingSample = false;
     totalBytesWritten = 0;
   }

@@ -132,22 +141,30 @@ import java.util.List;
 
       int nalUnitType = H264Util.getNalUnitType(dataArray, nextNalUnitOffset);
       int bytesWrittenPastNalUnit = limit - nextNalUnitOffset;
-      if (nalUnitType == NAL_UNIT_TYPE_AUD) {
-        if (writingSample) {
-          if (isKeyframe && !hasOutputFormat && sps.isCompleted() && pps.isCompleted()) {
-            parseMediaFormat(sps, pps);
+      switch (nalUnitType) {
+        case NAL_UNIT_TYPE_IDR:
+          isKeyframe = true;
+          break;
+        case NAL_UNIT_TYPE_AUD:
+          if (writingSample) {
+            if (ifrParserBuffer != null && ifrParserBuffer.isCompleted()) {
+              int sliceType = ifrParserBuffer.getSliceType();
+              isKeyframe |= (sliceType == FRAME_TYPE_I || sliceType == FRAME_TYPE_ALL_I);
+              ifrParserBuffer.reset();
+            }
+            if (isKeyframe && !hasOutputFormat && sps.isCompleted() && pps.isCompleted()) {
+              parseMediaFormat(sps, pps);
+            }
+            int flags = isKeyframe ? C.SAMPLE_FLAG_SYNC : 0;
+            int size = (int) (totalBytesWritten - samplePosition) - bytesWrittenPastNalUnit;
+            output.sampleMetadata(sampleTimeUs, flags, size, bytesWrittenPastNalUnit, null);
+            writingSample = false;
           }
-          int flags = isKeyframe ? C.SAMPLE_FLAG_SYNC : 0;
-          int size = (int) (totalBytesWritten - samplePosition) - bytesWrittenPastNalUnit;
-          output.sampleMetadata(sampleTimeUs, flags, size, bytesWrittenPastNalUnit, null);
-          writingSample = false;
-        }
-        writingSample = true;
-        isKeyframe = false;
-        sampleTimeUs = pesTimeUs;
-        samplePosition = totalBytesWritten - bytesWrittenPastNalUnit;
-      } else if (nalUnitType == NAL_UNIT_TYPE_IDR) {
-        isKeyframe = true;
+          writingSample = true;
+          samplePosition = totalBytesWritten - bytesWrittenPastNalUnit;
+          sampleTimeUs = pesTimeUs;
+          isKeyframe = false;
+          break;
       }
 
       // If the length to the start of the unit is negative then we wrote too many bytes to the

@@ -171,6 +188,9 @@ import java.util.List;
   }
 
   private void feedNalUnitTargetBuffersStart(int nalUnitType) {
+    if (ifrParserBuffer != null) {
+      ifrParserBuffer.startNalUnit(nalUnitType);
+    }
     if (!hasOutputFormat) {
       sps.startNalUnit(nalUnitType);
       pps.startNalUnit(nalUnitType);

@@ -179,6 +199,9 @@ import java.util.List;
   }
 
   private void feedNalUnitTargetBuffersData(byte[] dataArray, int offset, int limit) {
+    if (ifrParserBuffer != null) {
+      ifrParserBuffer.appendToNalUnit(dataArray, offset, limit);
+    }
     if (!hasOutputFormat) {
       sps.appendToNalUnit(dataArray, offset, limit);
       pps.appendToNalUnit(dataArray, offset, limit);

@@ -461,4 +484,99 @@ import java.util.List;
 
   }
 
+  /**
+   * A buffer specifically for IFR units that can be used to parse the IFR's slice type.
+   */
+  private static final class IfrParserBuffer {
+
+    private static final int DEFAULT_BUFFER_SIZE = 128;
+    private static final int NOT_SET = -1;
+
+    private final ParsableBitArray scratchSliceType;
+
+    private byte[] ifrData;
+    private int ifrLength;
+    private boolean isFilling;
+    private int sliceType;
+
+    public IfrParserBuffer() {
+      ifrData = new byte[DEFAULT_BUFFER_SIZE];
+      scratchSliceType = new ParsableBitArray(ifrData);
+      reset();
+    }
+
+    /**
+     * Resets the buffer, clearing any data that it holds.
+     */
+    public void reset() {
+      isFilling = false;
+      ifrLength = 0;
+      sliceType = NOT_SET;
+    }
+
+    /**
+     * True if enough data was added to the buffer that the slice type was determined.
+     */
+    public boolean isCompleted() {
+      return sliceType != NOT_SET;
+    }
+
+    /**
+     * Invoked to indicate that a NAL unit has started, and if it is an IFR then the buffer will
+     * start.
+     */
+    public void startNalUnit(int nalUnitType) {
+      if (nalUnitType == NAL_UNIT_TYPE_IFR) {
+        reset();
+        isFilling = true;
+      }
+    }
+
+    /**
+     * Invoked to pass stream data. The data passed should not include 4 byte NAL unit prefixes.
+     *
+     * @param data Holds the data being passed.
+     * @param offset The offset of the data in {@code data}.
+     * @param limit The limit (exclusive) of the data in {@code data}.
+     */
+    public void appendToNalUnit(byte[] data, int offset, int limit) {
+      if (!isFilling) {
+        return;
+      }
+      int readLength = limit - offset;
+      if (ifrData.length < ifrLength + readLength) {
+        ifrData = Arrays.copyOf(ifrData, (ifrLength + readLength) * 2);
+      }
+      System.arraycopy(data, offset, ifrData, ifrLength, readLength);
+      ifrLength += readLength;
+
+      scratchSliceType.reset(ifrData, ifrLength);
+      // first_mb_in_slice
+      int len = scratchSliceType.peekExpGolombCodedNumLength();
+      if ((len == -1) || (len > scratchSliceType.bitsLeft())) {
+        // Not enough yet
+        return;
+      }
+
+      scratchSliceType.skipBits(len);
+      // slice_type
+      len = scratchSliceType.peekExpGolombCodedNumLength();
+      if ((len == -1) || (len > scratchSliceType.bitsLeft())) {
+        // Not enough yet
+        return;
+      }
+      sliceType = scratchSliceType.readUnsignedExpGolombCodedInt();
+
+      isFilling = false;
+    }
+
+    /**
+     * @return the slice type of the IFR.
+     */
+    public int getSliceType() {
+      return sliceType;
+    }
+
+  }
 }
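
IfrParserBuffer only needs to read two exponential-Golomb coded fields (first_mb_in_slice and slice_type) from the start of a non-IDR slice header. For readers unfamiliar with the coding, here is a rough standalone sketch of unsigned exp-Golomb (ue(v)) decoding over a byte array; the class and method names are illustrative, not ExoPlayer's:

```java
// Illustrative ue(v) decoder: count leading zero bits, then read that many suffix bits and add
// them to (2^zeros - 1). Assumes emulation prevention bytes have already been removed from the
// bitstream, as the reader above does before parsing.
final class ExpGolombReader {
  private final byte[] data;
  private int bitOffset; // Absolute bit position from the start of data.

  ExpGolombReader(byte[] data) {
    this.data = data;
  }

  int readUnsignedExpGolomb() {
    int leadingZeros = 0;
    while (readBit() == 0) {
      leadingZeros++;
    }
    int suffix = 0;
    for (int i = 0; i < leadingZeros; i++) {
      suffix = (suffix << 1) | readBit();
    }
    return (1 << leadingZeros) - 1 + suffix;
  }

  private int readBit() {
    int bit = (data[bitOffset / 8] >> (7 - (bitOffset % 8))) & 1;
    bitOffset++;
    return bit;
  }
}
```

With slice_type decoded this way, values 2 and 7 (I and all-I slices) correspond to the FRAME_TYPE_I and FRAME_TYPE_ALL_I constants that the reader ORs into isKeyframe.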
@@ -53,6 +53,7 @@ public final class TsExtractor implements Extractor, SeekMap {
 
   private final ParsableByteArray tsPacketBuffer;
   private final ParsableBitArray tsScratch;
+  private final boolean idrKeyframesOnly;
   private final long firstSampleTimestampUs;
   /* package */ final SparseBooleanArray streamTypes;
   /* package */ final SparseBooleanArray allowedPassthroughStreamTypes;

@@ -65,11 +66,21 @@ public final class TsExtractor implements Extractor, SeekMap {
   /* package */ Id3Reader id3Reader;
 
   public TsExtractor() {
-    this(0, null);
+    this(0);
+  }
+
+  public TsExtractor(long firstSampleTimestampUs) {
+    this(firstSampleTimestampUs, null);
   }
 
   public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities) {
+    this(firstSampleTimestampUs, audioCapabilities, true);
+  }
+
+  public TsExtractor(long firstSampleTimestampUs, AudioCapabilities audioCapabilities,
+      boolean idrKeyframesOnly) {
     this.firstSampleTimestampUs = firstSampleTimestampUs;
+    this.idrKeyframesOnly = idrKeyframesOnly;
     tsScratch = new ParsableBitArray(new byte[3]);
     tsPacketBuffer = new ParsableByteArray(TS_PACKET_SIZE);
     streamTypes = new SparseBooleanArray();

@@ -103,6 +114,8 @@ public final class TsExtractor implements Extractor, SeekMap {
       return RESULT_END_OF_INPUT;
     }
 
+    // Note: see ISO/IEC 13818-1, section 2.4.3.2 for detailed information on the format of
+    // the header.
     tsPacketBuffer.setPosition(0);
     tsPacketBuffer.setLimit(TS_PACKET_SIZE);
     int syncByte = tsPacketBuffer.readUnsignedByte();

@@ -292,6 +305,8 @@ public final class TsExtractor implements Extractor, SeekMap {
         data.skipBytes(pointerField);
       }
 
+      // Note: see ISO/IEC 13818-1, section 2.4.4.8 for detailed information on the format of
+      // the header.
       data.readBytes(pmtScratch, 3);
       pmtScratch.skipBits(12); // table_id (8), section_syntax_indicator (1), '0' (1), reserved (2)
       int sectionLength = pmtScratch.readBits(12);

@@ -347,7 +362,8 @@ public final class TsExtractor implements Extractor, SeekMap {
           break;
         case TS_STREAM_TYPE_H264:
           SeiReader seiReader = new SeiReader(output.track(TS_STREAM_TYPE_EIA608));
-          pesPayloadReader = new H264Reader(output.track(TS_STREAM_TYPE_H264), seiReader);
+          pesPayloadReader = new H264Reader(output.track(TS_STREAM_TYPE_H264), seiReader,
+              idrKeyframesOnly);
           break;
         case TS_STREAM_TYPE_ID3:
           pesPayloadReader = id3Reader;

@@ -502,6 +518,8 @@ public final class TsExtractor implements Extractor, SeekMap {
     }
 
     private boolean parseHeader() {
+      // Note: see ISO/IEC 13818-1, section 2.4.3.6 for detailed information on the format of
+      // the header.
       pesScratch.setPosition(0);
       int startCodePrefix = pesScratch.readBits(24);
       if (startCodePrefix != 0x000001) {

@@ -534,7 +552,7 @@ public final class TsExtractor implements Extractor, SeekMap {
       pesScratch.setPosition(0);
       timeUs = 0;
       if (ptsFlag) {
-        pesScratch.skipBits(4); // '0010'
+        pesScratch.skipBits(4); // '0010' or '0011'
         long pts = (long) pesScratch.readBits(3) << 30;
         pesScratch.skipBits(1); // marker_bit
         pts |= pesScratch.readBits(15) << 15;
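
The PES header change above only touches a comment: the four bits preceding the PTS are '0010' or '0011', depending on whether a DTS follows. As a quick illustration of how the 33-bit PTS is reassembled from its three split fields and converted to microseconds (standalone sketch, with the fields assumed to be already-extracted ints):

```java
// The PTS is carried as 3 + 15 + 15 bits, separated by marker bits, in 90 kHz clock units.
final class PesTimestamps {
  /** Reassembles the three PTS fields and converts the result to microseconds. */
  static long ptsToUs(int bits32to30, int bits29to15, int bits14to0) {
    long pts = ((long) bits32to30 << 30) | ((long) bits29to15 << 15) | bits14to0;
    return pts * 1000000 / 90000; // 90 kHz ticks -> microseconds.
  }
}
```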
@@ -61,7 +61,6 @@ public final class HlsExtractorWrapper implements ExtractorOutput {
     this.extractor = extractor;
     this.shouldSpliceIn = shouldSpliceIn;
     sampleQueues = new SparseArray<DefaultTrackOutput>();
-    extractor.init(this);
   }
 
   /**

@@ -71,6 +70,7 @@ public final class HlsExtractorWrapper implements ExtractorOutput {
    */
   public void init(Allocator allocator) {
     this.allocator = allocator;
+    this.extractor.init(this);
   }
 
   /**
@@ -72,7 +72,7 @@ public final class HlsPlaylistParser implements UriLoadable.Parser<HlsPlaylist>
   private static final Pattern CODECS_ATTR_REGEX =
       Pattern.compile(CODECS_ATTR + "=\"(.+?)\"");
   private static final Pattern RESOLUTION_ATTR_REGEX =
-      Pattern.compile(RESOLUTION_ATTR + "=(\\d+(\\.\\d+)?x\\d+(\\.\\d+)?)");
+      Pattern.compile(RESOLUTION_ATTR + "=(\\d+x\\d+)");
   private static final Pattern MEDIA_DURATION_REGEX =
       Pattern.compile(MEDIA_DURATION_TAG + ":([\\d.]+),");
   private static final Pattern MEDIA_SEQUENCE_REGEX =

@@ -168,12 +168,12 @@ public final class HlsPlaylistParser implements UriLoadable.Parser<HlsPlaylist>
         RESOLUTION_ATTR_REGEX);
     if (resolutionString != null) {
       String[] widthAndHeight = resolutionString.split("x");
-      width = Math.round(Float.parseFloat(widthAndHeight[0]));
+      width = Integer.parseInt(widthAndHeight[0]);
       if (width <= 0) {
         // Width was invalid.
         width = -1;
       }
-      height = Math.round(Float.parseFloat(widthAndHeight[1]));
+      height = Integer.parseInt(widthAndHeight[1]);
       if (height <= 0) {
         // Height was invalid.
         height = -1;
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer.text;
+
+import android.text.Layout.Alignment;
+
+/**
+ * Contains information about a specific cue, including textual content and formatting data.
+ */
+public class Cue {
+
+  /**
+   * Used by some methods to indicate that no value is set.
+   */
+  public static final int UNSET_VALUE = -1;
+
+  public final CharSequence text;
+
+  public final int line;
+  public final int position;
+  public final Alignment alignment;
+  public final int size;
+
+  public Cue() {
+    this(null);
+  }
+
+  public Cue(CharSequence text) {
+    this(text, UNSET_VALUE, UNSET_VALUE, null, UNSET_VALUE);
+  }
+
+  public Cue(CharSequence text, int line, int position, Alignment alignment, int size) {
+    this.text = text;
+    this.line = line;
+    this.position = position;
+    this.alignment = alignment;
+    this.size = size;
+  }
+
+}
@@ -15,6 +15,8 @@
  */
 package com.google.android.exoplayer.text;
 
+import java.util.List;
+
 /**
  * A subtitle that contains textual data associated with time indices.
  */

@@ -39,8 +41,8 @@ public interface Subtitle {
   public int getNextEventTimeIndex(long timeUs);
 
   /**
-   * Gets the number of event times, where events are defined as points in time at which the text
-   * returned by {@link #getText(long)} changes.
+   * Gets the number of event times, where events are defined as points in time at which the cues
+   * returned by {@link #getCues(long)} changes.
    *
    * @return The number of event times.
    */

@@ -62,11 +64,11 @@ public interface Subtitle {
   public long getLastEventTime();
 
   /**
-   * Retrieve the subtitle text that should be displayed at a given time.
+   * Retrieve the subtitle cues that should be displayed at a given time.
    *
    * @param timeUs The time in microseconds.
-   * @return The text that should be displayed, or null.
+   * @return A list of cues that should be displayed, possibly empty.
    */
-  public String getText(long timeUs);
+  public List<Cue> getCues(long timeUs);
 
 }
@@ -0,0 +1,189 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer.text;
+
+import android.content.Context;
+import android.text.Layout.Alignment;
+import android.util.AttributeSet;
+import android.view.ViewGroup;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A view for rendering rich-formatted captions.
+ */
+public final class SubtitleLayout extends ViewGroup {
+
+  /**
+   * Use the same line height ratio as WebVtt to match the display with the preview.
+   * WebVtt specifies line height as 5.3% of the viewport height.
+   */
+  private static final float LINE_HEIGHT_RATIO = 0.0533f;
+
+  private final List<SubtitleView> subtitleViews;
+
+  private List<Cue> subtitleCues;
+  private int viewsInUse;
+
+  private float fontScale;
+  private float textSize;
+  private CaptionStyleCompat captionStyle;
+
+  public SubtitleLayout(Context context) {
+    this(context, null);
+  }
+
+  public SubtitleLayout(Context context, AttributeSet attrs) {
+    super(context, attrs);
+    subtitleViews = new ArrayList<SubtitleView>();
+    fontScale = 1;
+    captionStyle = CaptionStyleCompat.DEFAULT;
+  }
+
+  /**
+   * Sets the cues to be displayed by the view.
+   *
+   * @param cues The cues to display.
+   */
+  public void setCues(List<Cue> cues) {
+    subtitleCues = cues;
+    int size = (cues == null) ? 0 : cues.size();
+
+    // create new subtitle views if necessary
+    if (size > subtitleViews.size()) {
+      for (int i = subtitleViews.size(); i < size; i++) {
+        SubtitleView newView = createSubtitleView();
+        subtitleViews.add(newView);
+      }
+    }
+
+    // add the views we currently need, if necessary
+    for (int i = viewsInUse; i < size; i++) {
+      addView(subtitleViews.get(i));
+    }
+
+    // remove the views we don't currently need, if necessary
+    for (int i = size; i < viewsInUse; i++) {
+      removeView(subtitleViews.get(i));
+    }
+
+    viewsInUse = size;
+
+    for (int i = 0; i < size; i++) {
+      subtitleViews.get(i).setText(cues.get(i).text);
+    }
+
+    requestLayout();
+  }
+
+  /**
+   * Sets the scale of the font.
+   *
+   * @param scale The scale of the font.
+   */
+  public void setFontScale(float scale) {
+    fontScale = scale;
+    updateSubtitlesTextSize();
+
+    for (SubtitleView subtitleView : subtitleViews) {
+      subtitleView.setTextSize(textSize);
+    }
+    requestLayout();
+  }
+
+  /**
+   * Configures the view according to the given style.
+   *
+   * @param captionStyle A style for the view.
+   */
+  public void setStyle(CaptionStyleCompat captionStyle) {
+    this.captionStyle = captionStyle;
+
+    for (SubtitleView subtitleView : subtitleViews) {
+      subtitleView.setStyle(captionStyle);
+    }
+    requestLayout();
+  }
+
+  @Override
+  protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+    int width = MeasureSpec.getSize(widthMeasureSpec);
+    int height = MeasureSpec.getSize(heightMeasureSpec);
+    setMeasuredDimension(width, height);
+
+    updateSubtitlesTextSize();
+
+    for (int i = 0; i < viewsInUse; i++) {
+      subtitleViews.get(i).setTextSize(textSize);
+      subtitleViews.get(i).measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.AT_MOST),
+          MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST));
+    }
+  }
+
+  @Override
+  protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+    int width = right - left;
+    int height = bottom - top;
+
+    for (int i = 0; i < viewsInUse; i++) {
+      SubtitleView subtitleView = subtitleViews.get(i);
+      Cue subtitleCue = subtitleCues.get(i);
+
+      int viewLeft = (width - subtitleView.getMeasuredWidth()) / 2;
+      int viewRight = viewLeft + subtitleView.getMeasuredWidth();
+      int viewTop = bottom - subtitleView.getMeasuredHeight();
+      int viewBottom = bottom;
+
+      if (subtitleCue.alignment != null) {
+        subtitleView.setTextAlignment(subtitleCue.alignment);
+      } else {
+        subtitleView.setTextAlignment(Alignment.ALIGN_CENTER);
+      }
+      if (subtitleCue.position != Cue.UNSET_VALUE) {
+        if (subtitleCue.alignment == Alignment.ALIGN_OPPOSITE) {
+          viewRight = (int) ((width * (double) subtitleCue.position) / 100) + left;
+          viewLeft = Math.max(viewRight - subtitleView.getMeasuredWidth(), left);
+        } else {
+          viewLeft = (int) ((width * (double) subtitleCue.position) / 100) + left;
+          viewRight = Math.min(viewLeft + subtitleView.getMeasuredWidth(), right);
+        }
+      }
+      if (subtitleCue.line != Cue.UNSET_VALUE) {
+        viewTop = (int) (height * (double) subtitleCue.line / 100) + top;
+        viewBottom = viewTop + subtitleView.getMeasuredHeight();
+        if (viewBottom > bottom) {
+          viewTop = bottom - subtitleView.getMeasuredHeight();
+          viewBottom = bottom;
+        }
+      }
+
+      subtitleView.layout(viewLeft, viewTop, viewRight, viewBottom);
+    }
+  }
+
+  private void updateSubtitlesTextSize() {
+    textSize = LINE_HEIGHT_RATIO * getHeight() * fontScale;
+  }
+
+  private SubtitleView createSubtitleView() {
+    SubtitleView view = new SubtitleView(getContext());
+    view.setStyle(captionStyle);
+    view.setTextSize(textSize);
+    return view;
+  }
+
+}
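
onLayout above converts the cue's percentage-based position and line values into pixel bounds inside the parent. A small standalone illustration of the horizontal part of that conversion (hypothetical helper, not part of the library):

```java
// Convert a WebVTT-style percentage position into left/right pixel bounds for a child view
// of the given measured width, keeping the child inside [parentLeft, parentRight].
final class CuePositioning {
  static int[] horizontalBounds(int positionPercent, boolean alignOpposite,
      int childWidth, int parentLeft, int parentRight) {
    int parentWidth = parentRight - parentLeft;
    int anchor = parentLeft + (parentWidth * positionPercent) / 100;
    int childLeft;
    int childRight;
    if (alignOpposite) {
      // The percentage marks the right edge of the cue.
      childRight = anchor;
      childLeft = Math.max(childRight - childWidth, parentLeft);
    } else {
      // The percentage marks the left edge of the cue.
      childLeft = anchor;
      childRight = Math.min(childLeft + childWidth, parentRight);
    }
    return new int[] {childLeft, childRight};
  }
}
```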
@@ -28,6 +28,7 @@ import android.graphics.Paint.Join;
 import android.graphics.Paint.Style;
 import android.graphics.RectF;
 import android.graphics.Typeface;
+import android.text.Layout.Alignment;
 import android.text.StaticLayout;
 import android.text.TextPaint;
 import android.util.AttributeSet;

@@ -35,10 +36,7 @@ import android.util.DisplayMetrics;
 import android.view.View;
 
 /**
- * A view for rendering captions.
- * <p>
- * The caption style and text size can be configured using {@link #setStyle(CaptionStyleCompat)} and
- * {@link #setTextSize(float)} respectively.
+ * A view for rendering a single caption.
  */
 public class SubtitleView extends View {
 

@@ -52,11 +50,6 @@ public class SubtitleView extends View {
    */
   private final RectF lineBounds = new RectF();
 
-  /**
-   * Reusable string builder used for holding text.
-   */
-  private final StringBuilder textBuilder = new StringBuilder();
-
   // Styled dimensions.
   private final float cornerRadius;
   private final float outlineWidth;

@@ -66,6 +59,8 @@ public class SubtitleView extends View {
   private TextPaint textPaint;
   private Paint paint;
 
+  private CharSequence text;
+
   private int foregroundColor;
   private int backgroundColor;
   private int edgeColor;

@@ -75,10 +70,15 @@ public class SubtitleView extends View {
   private int lastMeasuredWidth;
   private StaticLayout layout;
 
+  private Alignment alignment;
   private float spacingMult;
   private float spacingAdd;
   private int innerPaddingX;
 
+  public SubtitleView(Context context) {
+    this(context, null);
+  }
+
   public SubtitleView(Context context, AttributeSet attrs) {
     this(context, attrs, 0);
   }

@@ -107,6 +107,8 @@ public class SubtitleView extends View {
     textPaint.setAntiAlias(true);
     textPaint.setSubpixelText(true);
 
+    alignment = Alignment.ALIGN_CENTER;
+
     paint = new Paint();
     paint.setAntiAlias(true);
 

@@ -116,10 +118,6 @@ public class SubtitleView extends View {
     setStyle(CaptionStyleCompat.DEFAULT);
   }
 
-  public SubtitleView(Context context) {
-    this(context, null);
-  }
-
   @Override
   public void setBackgroundColor(int color) {
     backgroundColor = color;

@@ -132,8 +130,7 @@ public class SubtitleView extends View {
    * @param text The text to display.
    */
   public void setText(CharSequence text) {
-    textBuilder.setLength(0);
-    textBuilder.append(text);
+    this.text = text;
     forceUpdate(true);
   }
 

@@ -150,6 +147,15 @@ public class SubtitleView extends View {
     }
   }
 
+  /**
+   * Sets the text alignment.
+   *
+   * @param textAlignment The text alignment.
+   */
+  public void setTextAlignment(Alignment textAlignment) {
+    alignment = textAlignment;
+  }
+
   /**
    * Configures the view according to the given style.
    *

@@ -227,8 +233,7 @@ public class SubtitleView extends View {
 
     hasMeasurements = true;
     lastMeasuredWidth = maxWidth;
-    layout = new StaticLayout(textBuilder, textPaint, maxWidth, null, spacingMult, spacingAdd,
-        true);
+    layout = new StaticLayout(text, textPaint, maxWidth, alignment, spacingMult, spacingAdd, true);
     return true;
   }
 
@@ -15,16 +15,18 @@
  */
 package com.google.android.exoplayer.text;
 
+import java.util.List;
+
 /**
  * An interface for components that render text.
  */
 public interface TextRenderer {
 
   /**
-   * Invoked each time there is a change in the text to be rendered.
+   * Invoked each time there is a change in the {@link Cue}s to be rendered.
    *
-   * @param text The text to render, or null if no text is to be rendered.
+   * @param cues The {@link Cue}s to be rendered, or an empty list if no cues are to be rendered.
    */
-  void onText(String text);
+  void onCues(List<Cue> cues);
 
 }
@@ -30,6 +30,8 @@ import android.os.Looper;
 import android.os.Message;
 
 import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
 
 /**
  * A {@link TrackRenderer} for textual subtitles. The actual rendering of each line of text to a

@@ -255,34 +257,36 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
   }
 
   private void updateTextRenderer(long positionUs) {
-    String text = subtitle.getText(positionUs);
+    List<Cue> cues = subtitle.getCues(positionUs);
     if (textRendererHandler != null) {
-      textRendererHandler.obtainMessage(MSG_UPDATE_OVERLAY, text).sendToTarget();
+      textRendererHandler.obtainMessage(MSG_UPDATE_OVERLAY, cues).sendToTarget();
     } else {
-      invokeRendererInternal(text);
+      invokeRendererInternalCues(cues);
     }
   }
 
   private void clearTextRenderer() {
     if (textRendererHandler != null) {
-      textRendererHandler.obtainMessage(MSG_UPDATE_OVERLAY, null).sendToTarget();
+      textRendererHandler.obtainMessage(MSG_UPDATE_OVERLAY, Collections.<Cue>emptyList())
+          .sendToTarget();
     } else {
-      invokeRendererInternal(null);
+      invokeRendererInternalCues(Collections.<Cue>emptyList());
    }
   }
 
+  @SuppressWarnings("unchecked")
   @Override
   public boolean handleMessage(Message msg) {
     switch (msg.what) {
       case MSG_UPDATE_OVERLAY:
-        invokeRendererInternal((String) msg.obj);
+        invokeRendererInternalCues((List<Cue>) msg.obj);
         return true;
     }
     return false;
   }
 
-  private void invokeRendererInternal(String text) {
-    textRenderer.onText(text);
+  private void invokeRendererInternalCues(List<Cue> cues) {
+    textRenderer.onCues(cues);
   }
 
 }
@@ -21,6 +21,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
 import com.google.android.exoplayer.SampleHolder;
 import com.google.android.exoplayer.SampleSource;
 import com.google.android.exoplayer.TrackRenderer;
+import com.google.android.exoplayer.text.Cue;
 import com.google.android.exoplayer.text.TextRenderer;
 import com.google.android.exoplayer.util.Assertions;
 import com.google.android.exoplayer.util.Util;

@@ -31,6 +32,7 @@ import android.os.Looper;
 import android.os.Message;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.TreeSet;
 
 /**

@@ -227,8 +229,9 @@ public class Eia608TrackRenderer extends TrackRenderer implements Callback {
     return false;
   }
 
-  private void invokeRendererInternal(String text) {
-    textRenderer.onText(text);
+  private void invokeRendererInternal(String cueText) {
+    Cue cue = new Cue(cueText);
+    textRenderer.onCues(Collections.singletonList(cue));
   }
 
   private void maybeParsePendingSample() {
@@ -15,9 +15,13 @@
  */
 package com.google.android.exoplayer.text.ttml;
 
+import com.google.android.exoplayer.text.Cue;
 import com.google.android.exoplayer.text.Subtitle;
 import com.google.android.exoplayer.util.Util;
 
+import java.util.Collections;
+import java.util.List;
+
 /**
  * A representation of a TTML subtitle.
  */

@@ -60,8 +64,14 @@ public final class TtmlSubtitle implements Subtitle {
   }
 
   @Override
-  public String getText(long timeUs) {
-    return root.getText(timeUs - startTimeUs);
+  public List<Cue> getCues(long timeUs) {
+    String cueText = root.getText(timeUs - startTimeUs);
+    if (cueText == null) {
+      return Collections.<Cue>emptyList();
+    } else {
+      Cue cue = new Cue(cueText);
+      return Collections.singletonList(cue);
+    }
   }
 
 }
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer.text.webvtt;
+
+import com.google.android.exoplayer.text.Cue;
+
+import android.text.Layout.Alignment;
+
+/**
+ * A representation of a WebVTT cue.
+ */
+/* package */ final class WebvttCue extends Cue {
+
+  public final long startTime;
+  public final long endTime;
+
+  public WebvttCue(CharSequence text) {
+    this(Cue.UNSET_VALUE, Cue.UNSET_VALUE, text);
+  }
+
+  public WebvttCue(long startTime, long endTime, CharSequence text) {
+    this(startTime, endTime, text, Cue.UNSET_VALUE, Cue.UNSET_VALUE, null, Cue.UNSET_VALUE);
+  }
+
+  public WebvttCue(long startTime, long endTime, CharSequence text, int line, int position,
+      Alignment alignment, int size) {
+    super(text, line, position, alignment, size);
+    this.startTime = startTime;
+    this.endTime = endTime;
+  }
+
+  /**
+   * Returns whether or not this cue should be placed in the default position and rolled-up with
+   * the other "normal" cues.
+   *
+   * @return True if this cue should be placed in the default position; false otherwise.
+   */
+  public boolean isNormalCue() {
+    return (line == UNSET_VALUE && position == UNSET_VALUE);
+  }
+
+}
@@ -17,9 +17,14 @@ package com.google.android.exoplayer.text.webvtt;
 
 import com.google.android.exoplayer.C;
 import com.google.android.exoplayer.ParserException;
+import com.google.android.exoplayer.text.Cue;
 import com.google.android.exoplayer.text.SubtitleParser;
 import com.google.android.exoplayer.util.MimeTypes;
 
+import android.text.Html;
+import android.text.Layout.Alignment;
+import android.util.Log;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;

@@ -35,6 +40,8 @@ import java.util.regex.Pattern;
  */
 public class WebvttParser implements SubtitleParser {
 
+  static final String TAG = "WebvttParser";
+
   /**
    * This parser allows a custom header to be prepended to the WebVTT data, in the form of a text
    * line starting with this string.

@@ -63,21 +70,26 @@ public class WebvttParser implements SubtitleParser {
   private static final String WEBVTT_TIMESTAMP_STRING = "(\\d+:)?[0-5]\\d:[0-5]\\d\\.\\d{3}";
   private static final Pattern WEBVTT_TIMESTAMP = Pattern.compile(WEBVTT_TIMESTAMP_STRING);
 
+  private static final String WEBVTT_CUE_SETTING_STRING = "\\S*:\\S*";
+  private static final Pattern WEBVTT_CUE_SETTING = Pattern.compile(WEBVTT_CUE_SETTING_STRING);
+
   private static final Pattern MEDIA_TIMESTAMP_OFFSET = Pattern.compile(OFFSET + "\\d+");
   private static final Pattern MEDIA_TIMESTAMP = Pattern.compile("MPEGTS:\\d+");
 
-  private static final String WEBVTT_CUE_TAG_STRING = "\\<.*?>";
+  private static final String NON_NUMERIC_STRING = ".*[^0-9].*";
 
+  private final StringBuilder textBuilder;
+
   private final boolean strictParsing;
-  private final boolean filterTags;
 
   public WebvttParser() {
-    this(true, true);
+    this(true);
   }
 
-  public WebvttParser(boolean strictParsing, boolean filterTags) {
+  public WebvttParser(boolean strictParsing) {
     this.strictParsing = strictParsing;
-    this.filterTags = filterTags;
+    textBuilder = new StringBuilder();
   }
 
   @Override

@@ -145,6 +157,7 @@ public class WebvttParser implements SubtitleParser {
 
     // process the cues and text
     while ((line = webvttData.readLine()) != null) {
+
       // parse the cue identifier (if present) {
       Matcher matcher = WEBVTT_CUE_IDENTIFIER.matcher(line);
       if (matcher.find()) {

@@ -152,11 +165,16 @@ public class WebvttParser implements SubtitleParser {
         line = webvttData.readLine();
       }
 
+      long startTime = Cue.UNSET_VALUE;
+      long endTime = Cue.UNSET_VALUE;
+      CharSequence text = null;
+      int lineNum = Cue.UNSET_VALUE;
+      int position = Cue.UNSET_VALUE;
+      Alignment alignment = null;
+      int size = Cue.UNSET_VALUE;
+
       // parse the cue timestamps
       matcher = WEBVTT_TIMESTAMP.matcher(line);
-      long startTime;
-      long endTime;
-      String text = "";
 
       // parse start timestamp
       if (!matcher.find()) {

@@ -166,36 +184,76 @@ public class WebvttParser implements SubtitleParser {
       }
 
       // parse end timestamp
+      String endTimeString;
       if (!matcher.find()) {
         throw new ParserException("Expected cue end time: " + line);
       } else {
-        endTime = parseTimestampUs(matcher.group()) + mediaTimestampUs;
+        endTimeString = matcher.group();
+        endTime = parseTimestampUs(endTimeString) + mediaTimestampUs;
+      }
+
+      // parse the (optional) cue setting list
+      line = line.substring(line.indexOf(endTimeString) + endTimeString.length());
+      matcher = WEBVTT_CUE_SETTING.matcher(line);
+      while (matcher.find()) {
+        String match = matcher.group();
+        String[] parts = match.split(":", 2);
+        String name = parts[0];
+        String value = parts[1];
+
+        try {
+          if ("line".equals(name)) {
+            if (value.endsWith("%")) {
+              lineNum = parseIntPercentage(value);
+            } else if (value.matches(NON_NUMERIC_STRING)) {
+              Log.w(TAG, "Invalid line value: " + value);
+            } else {
+              lineNum = Integer.parseInt(value);
+            }
+          } else if ("align".equals(name)) {
+            // TODO: handle for RTL languages
+            if ("start".equals(value)) {
+              alignment = Alignment.ALIGN_NORMAL;
+            } else if ("middle".equals(value)) {
+              alignment = Alignment.ALIGN_CENTER;
+            } else if ("end".equals(value)) {
+              alignment = Alignment.ALIGN_OPPOSITE;
+            } else if ("left".equals(value)) {
+              alignment = Alignment.ALIGN_NORMAL;
+            } else if ("right".equals(value)) {
+              alignment = Alignment.ALIGN_OPPOSITE;
+            } else {
+              Log.w(TAG, "Invalid align value: " + value);
+            }
+          } else if ("position".equals(name)) {
+            position = parseIntPercentage(value);
+          } else if ("size".equals(name)) {
+            size = parseIntPercentage(value);
+          } else {
+            Log.w(TAG, "Unknown cue setting " + name + ":" + value);
+          }
+        } catch (NumberFormatException e) {
+          Log.w(TAG, name + " contains an invalid value " + value, e);
+        }
       }
 
       // parse text
+      textBuilder.setLength(0);
       while (((line = webvttData.readLine()) != null) && (!line.isEmpty())) {
-        text += processCueText(line.trim()) + "\n";
+        if (textBuilder.length() > 0) {
+          textBuilder.append("<br>");
+        }
+        textBuilder.append(line.trim());
       }
+      text = Html.fromHtml(textBuilder.toString());
 
-      WebvttCue cue = new WebvttCue(startTime, endTime, text);
+      WebvttCue cue = new WebvttCue(startTime, endTime, text, lineNum, position, alignment, size);
       subtitles.add(cue);
     }
 
     webvttData.close();
     inputStream.close();
-    // copy WebvttCue data into arrays for WebvttSubtitle constructor
-    String[] cueText = new String[subtitles.size()];
-    long[] cueTimesUs = new long[2 * subtitles.size()];
-    for (int subtitleIndex = 0; subtitleIndex < subtitles.size(); subtitleIndex++) {
-      int arrayIndex = subtitleIndex * 2;
-      WebvttCue cue = subtitles.get(subtitleIndex);
-      cueTimesUs[arrayIndex] = cue.startTime;
-      cueTimesUs[arrayIndex + 1] = cue.endTime;
-      cueText[subtitleIndex] = cue.text;
-    }
-
-    WebvttSubtitle subtitle = new WebvttSubtitle(cueText, mediaTimestampUs, cueTimesUs);
+    WebvttSubtitle subtitle = new WebvttSubtitle(subtitles, mediaTimestampUs);
     return subtitle;
   }
 

@@ -208,25 +266,29 @@ public class WebvttParser implements SubtitleParser {
     return startTimeUs;
   }
 
-  protected String processCueText(String line) {
-    if (filterTags) {
-      line = line.replaceAll(WEBVTT_CUE_TAG_STRING, "");
-      line = line.replaceAll("&lt;", "<");
-      line = line.replaceAll("&gt;", ">");
-      line = line.replaceAll("&nbsp;", " ");
-      line = line.replaceAll("&amp;", "&");
-      return line;
-    } else {
-      return line;
-    }
-  }
-
   protected void handleNoncompliantLine(String line) throws ParserException {
     if (strictParsing) {
       throw new ParserException("Unexpected line: " + line);
     }
   }
 
+  private static int parseIntPercentage(String s) throws NumberFormatException {
+    if (!s.endsWith("%")) {
+      throw new NumberFormatException(s + " doesn't end with '%'");
+    }
+
+    s = s.substring(0, s.length() - 1);
+    if (s.matches(NON_NUMERIC_STRING)) {
+      throw new NumberFormatException(s + " contains an invalid character");
+    }
+
+    int value = Integer.parseInt(s);
+    if (value < 0 || value > 100) {
+      throw new NumberFormatException(value + " is out of range [0-100]");
+    }
+    return value;
+  }
+
   private static long parseTimestampUs(String s) throws NumberFormatException {
     if (!s.matches(WEBVTT_TIMESTAMP_STRING)) {
       throw new NumberFormatException("has invalid format");

@@ -240,16 +302,4 @@ public class WebvttParser implements SubtitleParser {
     return (value * 1000 + Long.parseLong(parts[1])) * 1000;
   }
 
-  private static class WebvttCue {
-    public final long startTime;
-    public final long endTime;
-    public final String text;
-
-    public WebvttCue(long startTime, long endTime, String text) {
-      this.startTime = startTime;
-      this.endTime = endTime;
-      this.text = text;
-    }
-  }
-
 }
@ -15,32 +15,46 @@
 */
package com.google.android.exoplayer.text.webvtt;

+import com.google.android.exoplayer.text.Cue;
import com.google.android.exoplayer.text.Subtitle;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;

+import android.text.SpannableStringBuilder;
+
+import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;

/**
 * A representation of a WebVTT subtitle.
 */
public class WebvttSubtitle implements Subtitle {

- private final String[] cueText;
+ private final List<WebvttCue> cues;
+ private final int numCues;
  private final long startTimeUs;
  private final long[] cueTimesUs;
  private final long[] sortedCueTimesUs;

  /**
-  * @param cueText Text to be displayed during each cue.
+  * @param cues A list of the cues in this subtitle.
   * @param startTimeUs The start time of the subtitle.
-  * @param cueTimesUs Cue event times, where cueTimesUs[2 * i] and cueTimesUs[(2 * i) + 1] are
-  *     the start and end times, respectively, corresponding to cueText[i].
   */
- public WebvttSubtitle(String[] cueText, long startTimeUs, long[] cueTimesUs) {
-   this.cueText = cueText;
+ public WebvttSubtitle(List<WebvttCue> cues, long startTimeUs) {
+   this.cues = cues;
+   numCues = cues.size();
    this.startTimeUs = startTimeUs;
-   this.cueTimesUs = cueTimesUs;
+   this.cueTimesUs = new long[2 * numCues];
+   for (int cueIndex = 0; cueIndex < numCues; cueIndex++) {
+     WebvttCue cue = cues.get(cueIndex);
+     int arrayIndex = cueIndex * 2;
+     cueTimesUs[arrayIndex] = cue.startTime;
+     cueTimesUs[arrayIndex + 1] = cue.endTime;
+   }
+
    this.sortedCueTimesUs = Arrays.copyOf(cueTimesUs, cueTimesUs.length);
    Arrays.sort(sortedCueTimesUs);
  }

@ -78,22 +92,47 @@ public class WebvttSubtitle implements Subtitle {
  }

  @Override
- public String getText(long timeUs) {
-   StringBuilder stringBuilder = new StringBuilder();
+ public List<Cue> getCues(long timeUs) {
+   ArrayList<Cue> list = null;
+   WebvttCue firstNormalCue = null;
+   SpannableStringBuilder normalCueTextBuilder = null;

-   for (int i = 0; i < cueTimesUs.length; i += 2) {
-     if ((cueTimesUs[i] <= timeUs) && (timeUs < cueTimesUs[i + 1])) {
-       stringBuilder.append(cueText[i / 2]);
+   for (int i = 0; i < numCues; i++) {
+     if ((cueTimesUs[i * 2] <= timeUs) && (timeUs < cueTimesUs[i * 2 + 1])) {
+       if (list == null) {
+         list = new ArrayList<Cue>();
+       }
+       WebvttCue cue = cues.get(i);
+       if (cue.isNormalCue()) {
+         // we want to merge all of the normal cues into a single cue to ensure they are drawn
+         // correctly (i.e. don't overlap) and to emulate roll-up, but only if there are multiple
+         // normal cues, otherwise we can just append the single normal cue
+         if (firstNormalCue == null) {
+           firstNormalCue = cue;
+         } else if (normalCueTextBuilder == null) {
+           normalCueTextBuilder = new SpannableStringBuilder();
+           normalCueTextBuilder.append(firstNormalCue.text).append("\n").append(cue.text);
+         } else {
+           normalCueTextBuilder.append("\n").append(cue.text);
+         }
+       } else {
+         list.add(cue);
+       }
      }
    }

-   int stringLength = stringBuilder.length();
-   if (stringLength > 0 && stringBuilder.charAt(stringLength - 1) == '\n') {
-     // Adjust the length to remove the trailing newline character.
-     stringLength -= 1;
-   }
-
-   return stringLength == 0 ? null : stringBuilder.substring(0, stringLength);
+   if (normalCueTextBuilder != null) {
+     // there were multiple normal cues, so create a new cue with all of the text
+     list.add(new WebvttCue(normalCueTextBuilder));
+   } else if (firstNormalCue != null) {
+     // there was only a single normal cue, so just add it to the list
+     list.add(firstNormalCue);
+   }
+
+   if (list != null) {
+     return list;
+   } else {
+     return Collections.<Cue>emptyList();
+   }
  }

}
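As a rough standalone sketch of the roll-up idea used above (merging the text of all active un-positioned cues into one block so they are drawn in order rather than overlapping); the class and method names here are hypothetical and it works on plain strings rather than WebvttCue/SpannableStringBuilder.

```java
import java.util.Arrays;
import java.util.List;

public final class RollUpSketch {

  /** Joins the texts of the currently active "normal" cues, oldest first, one per line. */
  static String mergeNormalCues(List<String> activeCueTexts) {
    StringBuilder merged = new StringBuilder();
    for (String text : activeCueTexts) {
      if (merged.length() > 0) {
        merged.append("\n");
      }
      merged.append(text);
    }
    return merged.toString();
  }

  public static void main(String[] args) {
    System.out.println(mergeNormalCues(Arrays.asList("first line", "second line")));
  }

}
```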
@ -36,28 +36,6 @@ import java.io.IOException;
 */
public final class DefaultUriDataSource implements UriDataSource {

- /**
-  * Thrown when a {@link DefaultUriDataSource} is opened for a URI with an unsupported scheme.
-  */
- public static final class UnsupportedSchemeException extends IOException {
-
-   /**
-    * The unsupported scheme.
-    */
-   public final String scheme;
-
-   /**
-    * @param scheme The unsupported scheme.
-    */
-   public UnsupportedSchemeException(String scheme) {
-     super("Unsupported URI scheme: " + scheme);
-     this.scheme = scheme;
-   }
-
- }
-
- private static final String SCHEME_HTTP = "http";
- private static final String SCHEME_HTTPS = "https";
  private static final String SCHEME_FILE = "file";
  private static final String SCHEME_ASSET = "asset";
  private static final String SCHEME_CONTENT = "content";

@ -141,9 +119,7 @@ public final class DefaultUriDataSource implements UriDataSource {
    Assertions.checkState(dataSource == null);
    // Choose the correct source for the scheme.
    String scheme = dataSpec.uri.getScheme();
-   if (SCHEME_HTTP.equals(scheme) || SCHEME_HTTPS.equals(scheme)) {
-     dataSource = httpDataSource;
-   } else if (SCHEME_FILE.equals(scheme) || TextUtils.isEmpty(scheme)) {
+   if (SCHEME_FILE.equals(scheme) || TextUtils.isEmpty(scheme)) {
      if (dataSpec.uri.getPath().startsWith("/android_asset/")) {
        dataSource = assetDataSource;
      } else {

@ -154,7 +130,7 @@ public final class DefaultUriDataSource implements UriDataSource {
    } else if (SCHEME_CONTENT.equals(scheme)) {
      dataSource = contentDataSource;
    } else {
-     throw new UnsupportedSchemeException(scheme);
+     dataSource = httpDataSource;
    }
    // Open the source and return.
    return dataSource.open(dataSpec);
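A minimal standalone sketch of the dispatch pattern this hunk moves to: pick a handler by URI scheme and fall back to HTTP for anything unrecognised instead of throwing. The class, method, and handler names are illustrative, not ExoPlayer APIs.

```java
import java.net.URI;

public final class SchemeDispatchSketch {

  /** Picks a handler name for a URI, treating any unknown scheme as HTTP. */
  static String pickSource(URI uri) {
    String scheme = uri.getScheme();
    if (scheme == null || "file".equals(scheme)) {
      return "file";
    } else if ("asset".equals(scheme)) {
      return "asset";
    } else if ("content".equals(scheme)) {
      return "content";
    } else {
      return "http"; // fallback instead of throwing for unsupported schemes
    }
  }

  public static void main(String[] args) {
    System.out.println(pickSource(URI.create("rtsp://example.com/stream"))); // http
    System.out.println(pickSource(URI.create("/sdcard/video.mp4")));         // file
  }

}
```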
@ -128,21 +128,6 @@ public final class Loader {
    startLoading(myLooper, loadable, callback);
  }

- /**
-  * Invokes {@link #startLoading(Looper, Loadable, Callback)}, using the {@link Looper}
-  * associated with the calling thread. Loading is delayed by {@code delayMs}.
-  *
-  * @param loadable The {@link Loadable} to load.
-  * @param callback A callback to invoke when the load ends.
-  * @param delayMs Number of milliseconds to wait before calling {@link Loadable#load()}.
-  * @throws IllegalStateException If the calling thread does not have an associated {@link Looper}.
-  */
- public void startLoading(Loadable loadable, Callback callback, int delayMs) {
-   Looper myLooper = Looper.myLooper();
-   Assertions.checkState(myLooper != null);
-   startLoading(myLooper, loadable, callback, delayMs);
- }
-
  /**
   * Start loading a {@link Loadable}.
   * <p>

@ -154,24 +139,9 @@ public final class Loader {
   * @param callback A callback to invoke when the load ends.
   */
  public void startLoading(Looper looper, Loadable loadable, Callback callback) {
-   startLoading(looper, loadable, callback, 0);
- }
-
- /**
-  * Start loading a {@link Loadable} after {@code delayMs} has elapsed.
-  * <p>
-  * A {@link Loader} instance can only load one {@link Loadable} at a time, and so this method
-  * must not be called when another load is in progress.
-  *
-  * @param looper The looper of the thread on which the callback should be invoked.
-  * @param loadable The {@link Loadable} to load.
-  * @param callback A callback to invoke when the load ends.
-  * @param delayMs Number of milliseconds to wait before calling {@link Loadable#load()}.
-  */
- public void startLoading(Looper looper, Loadable loadable, Callback callback, int delayMs) {
    Assertions.checkState(!loading);
    loading = true;
-   currentTask = new LoadTask(looper, loadable, callback, delayMs);
+   currentTask = new LoadTask(looper, loadable, callback);
    downloadExecutorService.submit(currentTask);
  }

@ -213,15 +183,13 @@ public final class Loader {

    private final Loadable loadable;
    private final Loader.Callback callback;
-   private final int delayMs;

    private volatile Thread executorThread;

-   public LoadTask(Looper looper, Loadable loadable, Loader.Callback callback, int delayMs) {
+   public LoadTask(Looper looper, Loadable loadable, Loader.Callback callback) {
      super(looper);
      this.loadable = loadable;
      this.callback = callback;
-     this.delayMs = delayMs;
    }

    public void quit() {

@ -235,9 +203,6 @@ public final class Loader {
    public void run() {
      try {
        executorThread = Thread.currentThread();
-       if (delayMs > 0) {
-         Thread.sleep(delayMs);
-       }
        if (!loadable.isLoadCanceled()) {
          loadable.load();
        }
@ -0,0 +1,140 @@
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer.upstream;

import com.google.android.exoplayer.C;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;

/**
 * A multicast {@link DataSource}.
 */
public class MulticastDataSource implements UriDataSource {

  /**
   * Thrown when an error is encountered when trying to read from a {@link MulticastDataSource}.
   */
  public static final class MulticastDataSourceException extends IOException {

    public MulticastDataSourceException(String message) {
      super(message);
    }

    public MulticastDataSourceException(IOException cause) {
      super(cause);
    }

  }

  public static final int DEFAULT_MAX_PACKET_SIZE = 2000;

  public static final int TRANSFER_LISTENER_PACKET_INTERVAL = 1000;

  private final TransferListener transferListener;
  private final DatagramPacket packet;

  private DataSpec dataSpec;
  private MulticastSocket socket;
  private boolean opened;

  private int packetsReceived;
  private byte[] packetBuffer;
  private int packetRemaining;

  public MulticastDataSource(TransferListener transferListener) {
    this(transferListener, DEFAULT_MAX_PACKET_SIZE);
  }

  public MulticastDataSource(TransferListener transferListener, int maxPacketSize) {
    this.transferListener = transferListener;

    packetBuffer = new byte[maxPacketSize];
    packet = new DatagramPacket(packetBuffer, 0, maxPacketSize);
  }

  @Override
  public long open(DataSpec dataSpec) throws MulticastDataSourceException {
    this.dataSpec = dataSpec;
    String uri = dataSpec.uri.toString();
    String host = uri.substring(0, uri.indexOf(':'));
    int port = Integer.parseInt(uri.substring(uri.indexOf(':') + 1));

    try {
      socket = new MulticastSocket(port);
      socket.joinGroup(InetAddress.getByName(host));
    } catch (IOException e) {
      throw new MulticastDataSourceException(e);
    }

    opened = true;
    transferListener.onTransferStart();
    return C.LENGTH_UNBOUNDED;
  }

  @Override
  public void close() {
    if (opened) {
      socket.close();
      socket = null;
      transferListener.onTransferEnd();
      packetRemaining = 0;
      packetsReceived = 0;
      opened = false;
    }
  }

  @Override
  public int read(byte[] buffer, int offset, int readLength) throws MulticastDataSourceException {
    // if we've read all the data, get another packet
    if (packetRemaining == 0) {
      if (packetsReceived == TRANSFER_LISTENER_PACKET_INTERVAL) {
        transferListener.onTransferEnd();
        transferListener.onTransferStart();
        packetsReceived = 0;
      }

      try {
        socket.receive(packet);
      } catch (IOException e) {
        throw new MulticastDataSourceException(e);
      }

      packetRemaining = packet.getLength();
      transferListener.onBytesTransferred(packetRemaining);
      packetsReceived++;
    }

    // don't try to read too much
    if (packetRemaining < readLength) {
      readLength = packetRemaining;
    }

    int packetOffset = packet.getLength() - packetRemaining;
    System.arraycopy(packetBuffer, packetOffset, buffer, offset, readLength);
    packetRemaining -= readLength;

    return readLength;
  }

  @Override
  public String getUri() {
    return dataSpec == null ? null : dataSpec.uri.toString();
  }

}
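For context, a minimal standalone sketch of the java.net multicast receive pattern this new DataSource wraps (join a group, block on a datagram, read its payload). The group address and port below are hypothetical placeholders.

```java
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;

public final class MulticastReceiveSketch {

  public static void main(String[] args) throws Exception {
    // Hypothetical group/port; replace with a real multicast stream to test.
    InetAddress group = InetAddress.getByName("239.0.0.1");
    int port = 5004;

    MulticastSocket socket = new MulticastSocket(port);
    socket.joinGroup(group);

    byte[] buffer = new byte[2000];
    DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
    socket.receive(packet); // blocks until one datagram arrives
    System.out.println("received " + packet.getLength() + " bytes");

    socket.leaveGroup(group);
    socket.close();
  }

}
```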
@ -26,6 +26,7 @@ public final class ParsableBitArray {
  // byte (from 0 to 7).
  private int byteOffset;
  private int bitOffset;
+ private int byteLimit;

  /** Creates a new instance that initially has no backing data. */
  public ParsableBitArray() {}

@ -36,7 +37,18 @@ public final class ParsableBitArray {
   * @param data The data to wrap.
   */
  public ParsableBitArray(byte[] data) {
+   this(data, data.length);
+ }
+
+ /**
+  * Creates a new instance that wraps an existing array.
+  *
+  * @param data The data to wrap.
+  * @param limit The limit in bytes.
+  */
+ public ParsableBitArray(byte[] data, int limit) {
    this.data = data;
+   byteLimit = limit;
  }

  /**

@ -45,9 +57,27 @@ public final class ParsableBitArray {
   * @param data The array to wrap.
   */
  public void reset(byte[] data) {
+   reset(data, data.length);
+ }
+
+ /**
+  * Updates the instance to wrap {@code data}, and resets the position to zero.
+  *
+  * @param data The array to wrap.
+  * @param limit The limit in bytes.
+  */
+ public void reset(byte[] data, int limit) {
    this.data = data;
    byteOffset = 0;
    bitOffset = 0;
+   byteLimit = limit;
+ }
+
+ /**
+  * Returns the number of bits yet to be read.
+  */
+ public int bitsLeft() {
+   return (byteLimit - byteOffset) * 8 - bitOffset;
  }

  /**

@ -67,6 +97,7 @@ public final class ParsableBitArray {
  public void setPosition(int position) {
    byteOffset = position / 8;
    bitOffset = position - (byteOffset * 8);
+   assertValidOffset();
  }

  /**

@ -81,6 +112,7 @@ public final class ParsableBitArray {
      byteOffset++;
      bitOffset -= 8;
    }
+   assertValidOffset();
  }

  /**

@ -103,12 +135,20 @@ public final class ParsableBitArray {
      return 0;
    }

-   int retval = 0;
+   int returnValue = 0;

    // While n >= 8, read whole bytes.
    while (n >= 8) {
+     int byteValue;
+     if (bitOffset != 0) {
+       byteValue = ((data[byteOffset] & 0xFF) << bitOffset)
+           | ((data[byteOffset + 1] & 0xFF) >>> (8 - bitOffset));
+     } else {
+       byteValue = data[byteOffset];
+     }
      n -= 8;
-     retval |= (readUnsignedByte() << n);
+     returnValue |= (byteValue & 0xFF) << n;
+     byteOffset++;
    }

    if (n > 0) {

@ -117,12 +157,12 @@ public final class ParsableBitArray {

      if (nextBit > 8) {
        // Combine bits from current byte and next byte.
-       retval |= (((getUnsignedByte(byteOffset) << (nextBit - 8)
-           | (getUnsignedByte(byteOffset + 1) >> (16 - nextBit))) & writeMask));
+       returnValue |= ((((data[byteOffset] & 0xFF) << (nextBit - 8)
+           | ((data[byteOffset + 1] & 0xFF) >> (16 - nextBit))) & writeMask));
        byteOffset++;
      } else {
        // Bits to be read only within current byte.
-       retval |= ((getUnsignedByte(byteOffset) >> (8 - nextBit)) & writeMask);
+       returnValue |= (((data[byteOffset] & 0xFF) >> (8 - nextBit)) & writeMask);
        if (nextBit == 8) {
          byteOffset++;
        }

@ -131,7 +171,27 @@ public final class ParsableBitArray {
      bitOffset = nextBit % 8;
    }

-   return retval;
+   assertValidOffset();
+   return returnValue;
+ }
+
+ /**
+  * Peeks the length of an Exp-Golomb-coded integer (signed or unsigned) starting from the current
+  * offset, returning the length or -1 if the limit is reached.
+  *
+  * @return The length of the Exp-Golomb-coded integer, or -1.
+  */
+ public int peekExpGolombCodedNumLength() {
+   int initialByteOffset = byteOffset;
+   int initialBitOffset = bitOffset;
+   int leadingZeros = 0;
+   while (byteOffset < byteLimit && !readBit()) {
+     leadingZeros++;
+   }
+   boolean hitLimit = byteOffset == byteLimit;
+   byteOffset = initialByteOffset;
+   bitOffset = initialBitOffset;
+   return hitLimit ? -1 : leadingZeros * 2 + 1;
  }

  /**

@ -153,22 +213,6 @@ public final class ParsableBitArray {
    return ((codeNum % 2) == 0 ? -1 : 1) * ((codeNum + 1) / 2);
  }

- private int readUnsignedByte() {
-   int value;
-   if (bitOffset != 0) {
-     value = ((data[byteOffset] & 0xFF) << bitOffset)
-         | ((data[byteOffset + 1] & 0xFF) >>> (8 - bitOffset));
-   } else {
-     value = data[byteOffset];
-   }
-   byteOffset++;
-   return value & 0xFF;
- }
-
- private int getUnsignedByte(int offset) {
-   return data[offset] & 0xFF;
- }
-
  private int readExpGolombCodeNum() {
    int leadingZeros = 0;
    while (!readBit()) {

@ -177,4 +221,11 @@ public final class ParsableBitArray {
    return (1 << leadingZeros) - 1 + (leadingZeros > 0 ? readBits(leadingZeros) : 0);
  }

+ private void assertValidOffset() {
+   // It is fine for position to be at the end of the array, but no further.
+   Assertions.checkState(byteOffset >= 0
+       && (bitOffset >= 0 && bitOffset < 8)
+       && (byteOffset < byteLimit || (byteOffset == byteLimit && bitOffset == 0)));
+ }
+
}
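For illustration, a small standalone sketch of unsigned Exp-Golomb decoding, the scheme the peek/read methods above operate on (count leading zero bits, read one stop bit, then that many suffix bits). The helper names and sample bytes are hypothetical and independent of ParsableBitArray.

```java
public final class ExpGolombSketch {

  /** Reads one bit from {@code data} at the given absolute bit position. */
  static int bitAt(byte[] data, int bitPosition) {
    return (data[bitPosition / 8] >> (7 - (bitPosition % 8))) & 1;
  }

  /** Decodes an unsigned Exp-Golomb code starting at {@code bitPosition}. */
  static int readUnsignedExpGolomb(byte[] data, int bitPosition) {
    int leadingZeros = 0;
    while (bitAt(data, bitPosition++) == 0) {
      leadingZeros++;
    }
    int suffix = 0;
    for (int i = 0; i < leadingZeros; i++) {
      suffix = (suffix << 1) | bitAt(data, bitPosition++);
    }
    // codeNum = 2^leadingZeros - 1 + suffix
    return (1 << leadingZeros) - 1 + suffix;
  }

  public static void main(String[] args) {
    // Bits 00111... encode 6: two leading zeros, a stop bit, then suffix 11.
    byte[] data = {(byte) 0b00111000};
    System.out.println(readUnsignedExpGolomb(data, 0)); // 6
  }

}
```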
@ -541,6 +541,22 @@ public final class Util {
    return result;
  }

+ /**
+  * Returns a hex string representation of the data provided.
+  *
+  * @param data The byte array containing the data to be turned into a hex string.
+  * @param beginIndex The begin index, inclusive.
+  * @param endIndex The end index, exclusive.
+  * @return A string containing the hex representation of the data provided.
+  */
+ public static String getHexStringFromBytes(byte[] data, int beginIndex, int endIndex) {
+   StringBuffer dataStringBuffer = new StringBuffer(endIndex - beginIndex);
+   for (int i = beginIndex; i < endIndex; i++) {
+     dataStringBuffer.append(String.format("%02X", data[i]));
+   }
+   return dataStringBuffer.toString();
+ }
+
  /**
   * Returns a user agent string based on the given application name and the library version.
   *
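A tiny standalone example of the same byte-to-hex formatting the new `Util.getHexStringFromBytes` performs; the class name and sample bytes are illustrative only.

```java
public final class HexSketch {

  public static void main(String[] args) {
    byte[] data = {0x0F, (byte) 0xA0, 0x42};
    StringBuilder hex = new StringBuilder();
    for (int i = 0; i < data.length; i++) {
      // %02X prints each byte as two upper-case hex digits.
      hex.append(String.format("%02X", data[i]));
    }
    System.out.println(hex); // 0FA042
  }

}
```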
@ -11,4 +11,4 @@ This is the <b><i>second</b></i> subtitle.
This is the <c.red.caps>third</c> subtitle.

00:06.000 --> 00:07.000
This is the <fourth> &subtitle.
@ -0,0 +1,36 @@
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer;

import junit.framework.TestCase;

/**
 * Unit test for {@link TimeRange}.
 */
public class TimeRangeTest extends TestCase {

  public void testEquals() {
    TimeRange timeRange1 = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, 30000000);
    assertTrue(timeRange1.equals(timeRange1));

    TimeRange timeRange2 = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, 30000000);
    assertTrue(timeRange1.equals(timeRange2));

    TimeRange timeRange3 = new TimeRange(TimeRange.TYPE_SNAPSHOT, 0, 60000000);
    assertFalse(timeRange1.equals(timeRange3));
  }

}
@ -15,9 +15,11 @@
 */
package com.google.android.exoplayer.dash;

+import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.google.android.exoplayer.MediaFormat;
+import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
import com.google.android.exoplayer.chunk.Format;

@ -55,12 +57,19 @@ public class DashChunkSourceTest extends InstrumentationTestCase {

  private static final FormatEvaluator EVALUATOR = new FixedEvaluator();

- private static final long AVAILABILITY_START_TIME = 0;
- private static final long AVAILABILITY_LATENCY = 5000;
- private static final long AVAILABILITY_REALTIME_OFFSET = 1000;
- private static final long AVAILABILITY_CURRENT_TIME =
-     AVAILABILITY_START_TIME + AVAILABILITY_LATENCY - AVAILABILITY_REALTIME_OFFSET;
- private static final FakeClock AVAILABILITY_CLOCK = new FakeClock(AVAILABILITY_CURRENT_TIME);
+ private static final long VOD_DURATION = 30000;
+
+ private static final long LIVE_SEGMENT_COUNT = 5;
+ private static final long LIVE_SEGMENT_DURATION_MS = 1000;
+ private static final long LIVE_TIMESHIFT_BUFFER_DEPTH_MS =
+     LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS;
+
+ private static final long AVAILABILITY_START_TIME_MS = 60000;
+ private static final long AVAILABILITY_REALTIME_OFFSET_MS = 1000;
+ private static final long AVAILABILITY_CURRENT_TIME_MS =
+     AVAILABILITY_START_TIME_MS + LIVE_TIMESHIFT_BUFFER_DEPTH_MS - AVAILABILITY_REALTIME_OFFSET_MS;
+
+ private static final long LIVE_SEEK_BEYOND_EDGE_MS = 60000;

  private static final int TALL_HEIGHT = 200;
  private static final int WIDE_WIDTH = 400;

@ -90,6 +99,21 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
    assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
  }

+ public void testGetSeekRangeOnVod() {
+   DashChunkSource chunkSource = new DashChunkSource(generateVodMpd(), AdaptationSet.TYPE_VIDEO,
+       null, null, mock(FormatEvaluator.class));
+   chunkSource.enable();
+   TimeRange seekRange = chunkSource.getSeekRange();
+
+   long[] seekRangeValuesUs = seekRange.getCurrentBoundsUs(null);
+   assertEquals(0, seekRangeValuesUs[0]);
+   assertEquals(VOD_DURATION * 1000, seekRangeValuesUs[1]);
+
+   long[] seekRangeValuesMs = seekRange.getCurrentBoundsMs(null);
+   assertEquals(0, seekRangeValuesMs[0]);
+   assertEquals(VOD_DURATION, seekRangeValuesMs[1]);
+ }
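As a rough standalone illustration of the arithmetic the live-edge tests below exercise: the end of the seekable range is the total duration of available segments minus the configured live edge latency. The constants mirror the test values, but this is a simplification, not DashChunkSource's actual logic.

```java
public final class SeekRangeSketch {

  public static void main(String[] args) {
    long segmentCount = 5;
    long segmentDurationMs = 1000;
    long liveEdgeLatencyMs = 1001;

    long seekRangeStartMs = 0;
    long seekRangeEndMs = segmentCount * segmentDurationMs - liveEdgeLatencyMs;
    System.out.println("seek range: [" + seekRangeStartMs + ", " + seekRangeEndMs + "] ms");
    // With 5 x 1000 ms segments and 1001 ms latency the range ends at 3999 ms,
    // so the chunk spanning 3000-4000 ms is the last one that can be requested.
  }

}
```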
  public void testMaxVideoDimensionsLegacy() {
    SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
    Representation representation1 =

@ -107,147 +131,254 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
    assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
  }

  public void testLiveEdgeNoLatency() {
    long startTimeMs = 0;
    long liveEdgeLatencyMs = 0;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 0;
    long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 4000;
    long chunkEndTimeMs = 5000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdgeAlmostNoLatency() {
    long startTimeMs = 0;
    long liveEdgeLatencyMs = 1;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 0;
    long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 4000;
    long chunkEndTimeMs = 5000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdge500msLatency() {
    long startTimeMs = 0;
    long liveEdgeLatencyMs = 500;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 0;
    long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 4000;
    long chunkEndTimeMs = 5000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdge1000msLatency() {
    long startTimeMs = 0;
    long liveEdgeLatencyMs = 1000;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 0;
    long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 4000;
    long chunkEndTimeMs = 5000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdge1001msLatency() {
    long startTimeMs = 0;
    long liveEdgeLatencyMs = 1001;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 0;
    long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 3000;
    long chunkEndTimeMs = 4000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdge2500msLatency() {
    long startTimeMs = 0;
    long liveEdgeLatencyMs = 2500;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 0;
    long seekRangeEndMs = LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 2000;
    long chunkEndTimeMs = 3000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdgeVeryHighLatency() {
    long startTimeMs = 0;
    long liveEdgeLatencyMs = 10000;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 0;
    long seekRangeEndMs = 0;
    long chunkStartTimeMs = 0;
    long chunkEndTimeMs = 1000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdgeNoLatencyInProgress() {
    long startTimeMs = 3000;
    long liveEdgeLatencyMs = 0;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 3000;
    long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 7000;
    long chunkEndTimeMs = 8000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdgeAlmostNoLatencyInProgress() {
    long startTimeMs = 3000;
    long liveEdgeLatencyMs = 1;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 3000;
    long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 7000;
    long chunkEndTimeMs = 8000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdge500msLatencyInProgress() {
    long startTimeMs = 3000;
    long liveEdgeLatencyMs = 500;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 3000;
    long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 7000;
    long chunkEndTimeMs = 8000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdge1000msLatencyInProgress() {
    long startTimeMs = 3000;
    long liveEdgeLatencyMs = 1000;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 3000;
    long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 7000;
    long chunkEndTimeMs = 8000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdge1001msLatencyInProgress() {
    long startTimeMs = 3000;
    long liveEdgeLatencyMs = 1001;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 3000;
    long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 6000;
    long chunkEndTimeMs = 7000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdge2500msLatencyInProgress() {
    long startTimeMs = 3000;
    long liveEdgeLatencyMs = 2500;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 3000;
    long seekRangeEndMs = 3000 + LIVE_SEGMENT_COUNT * LIVE_SEGMENT_DURATION_MS - liveEdgeLatencyMs;
    long chunkStartTimeMs = 5000;
    long chunkEndTimeMs = 6000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  public void testLiveEdgeVeryHighLatencyInProgress() {
    long startTimeMs = 3000;
    long liveEdgeLatencyMs = 10000;
    long seekPositionMs = LIVE_SEEK_BEYOND_EDGE_MS;
    long seekRangeStartMs = 3000;
    long seekRangeEndMs = 3000;
    long chunkStartTimeMs = 3000;
    long chunkEndTimeMs = 4000;

    checkLiveEdgeLatencyWithTimeline(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
        seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
    checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, 0, 0, 1000);
    checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
        seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  }

  private static MediaPresentationDescription generateMpd(boolean live,
-     List<Representation> representations) {
+     List<Representation> representations, boolean limitTimeshiftBuffer) {
    Representation firstRepresentation = representations.get(0);
    AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
    Period period = new Period(null, firstRepresentation.periodStartMs,
        firstRepresentation.periodDurationMs, Collections.singletonList(adaptationSet));
    long duration = (live) ? TrackRenderer.UNKNOWN_TIME_US
        : firstRepresentation.periodDurationMs - firstRepresentation.periodStartMs;
-   return new MediaPresentationDescription(AVAILABILITY_START_TIME, duration, -1, live, -1, -1,
+   return new MediaPresentationDescription(AVAILABILITY_START_TIME_MS, duration, -1, live, -1,
+       (limitTimeshiftBuffer) ? LIVE_TIMESHIFT_BUFFER_DEPTH_MS : -1,
        null, Collections.singletonList(period));
  }

@ -256,72 +387,126 @@ public class DashChunkSourceTest extends InstrumentationTestCase {

    SingleSegmentBase segmentBase1 = new SingleSegmentBase("https://example.com/1.mp4");
    Representation representation1 =
-       Representation.newInstance(0, 0, null, 0, TALL_VIDEO, segmentBase1);
+       Representation.newInstance(0, VOD_DURATION, null, 0, TALL_VIDEO, segmentBase1);
    representations.add(representation1);

    SingleSegmentBase segmentBase2 = new SingleSegmentBase("https://example.com/2.mp4");
    Representation representation2 =
-       Representation.newInstance(0, 0, null, 0, WIDE_VIDEO, segmentBase2);
+       Representation.newInstance(0, VOD_DURATION, null, 0, WIDE_VIDEO, segmentBase2);
    representations.add(representation2);

-   return generateMpd(false, representations);
+   return generateMpd(false, representations, false);
  }

- private static MediaPresentationDescription generateLiveMpdWithTimeline() {
+ private static MediaPresentationDescription generateLiveMpdWithTimeline(long startTime) {
    List<Representation> representations = new ArrayList<Representation>();

    List<SegmentTimelineElement> segmentTimeline = new ArrayList<SegmentTimelineElement>();
-   segmentTimeline.add(new SegmentTimelineElement(0L, 1000L));
-   segmentTimeline.add(new SegmentTimelineElement(1000L, 1000L));
-   segmentTimeline.add(new SegmentTimelineElement(2000L, 1000L));
-   segmentTimeline.add(new SegmentTimelineElement(3000L, 1000L));
-   segmentTimeline.add(new SegmentTimelineElement(4000L, 1000L));
    List<RangedUri> mediaSegments = new ArrayList<RangedUri>();
-   mediaSegments.add(new RangedUri("", "", 0L, 500L));
-   mediaSegments.add(new RangedUri("", "", 500L, 500L));
-   mediaSegments.add(new RangedUri("", "", 1000L, 500L));
-   mediaSegments.add(new RangedUri("", "", 1500L, 500L));
-   mediaSegments.add(new RangedUri("", "", 2000L, 500L));
+   long byteStart = 0;
+   for (int i = 0; i < LIVE_SEGMENT_COUNT; i++) {
+     segmentTimeline.add(new SegmentTimelineElement(startTime, LIVE_SEGMENT_DURATION_MS));
+     mediaSegments.add(new RangedUri("", "", byteStart, 500L));
+     startTime += LIVE_SEGMENT_DURATION_MS;
+     byteStart += 500;
+   }

    MultiSegmentBase segmentBase = new SegmentList(null, 1000, 0,
-       TrackRenderer.UNKNOWN_TIME_US, 1, TrackRenderer.UNKNOWN_TIME_US, segmentTimeline,
+       TrackRenderer.UNKNOWN_TIME_US, 0, TrackRenderer.UNKNOWN_TIME_US, segmentTimeline,
        mediaSegments);
-   Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
-       null, 0, REGULAR_VIDEO, segmentBase);
+   Representation representation = Representation.newInstance(startTime,
+       TrackRenderer.UNKNOWN_TIME_US, null, 0, REGULAR_VIDEO, segmentBase);
    representations.add(representation);

-   return generateMpd(true, representations);
+   return generateMpd(true, representations, false);
  }

- private static MediaPresentationDescription generateLiveMpdWithTemplate() {
+ private static MediaPresentationDescription generateLiveMpdWithTemplate(
+     boolean limitTimeshiftBuffer) {
    List<Representation> representations = new ArrayList<Representation>();

    UrlTemplate initializationTemplate = null;
    UrlTemplate mediaTemplate = UrlTemplate.compile("$RepresentationID$/$Number$");
    MultiSegmentBase segmentBase = new SegmentTemplate(null, 1000, 0,
-       TrackRenderer.UNKNOWN_TIME_US, 1, 1000, null,
+       TrackRenderer.UNKNOWN_TIME_US, 0, LIVE_SEGMENT_DURATION_MS, null,
        initializationTemplate, mediaTemplate, "http://www.youtube.com");
    Representation representation = Representation.newInstance(0, TrackRenderer.UNKNOWN_TIME_US,
        null, 0, REGULAR_VIDEO, segmentBase);
    representations.add(representation);

-   return generateMpd(true, representations);
+   return generateMpd(true, representations, limitTimeshiftBuffer);
  }

- private DashChunkSource setupLiveEdgeTimelineTest(long liveEdgeLatencyMs) {
-   MediaPresentationDescription manifest = generateLiveMpdWithTimeline();
+ private DashChunkSource setupLiveEdgeTimelineTest(long startTime, long liveEdgeLatencyMs) {
+   MediaPresentationDescription manifest = generateLiveMpdWithTimeline(startTime);
    when(mockManifestFetcher.getManifest()).thenReturn(manifest);
-   return new DashChunkSource(mockManifestFetcher, manifest, AdaptationSet.TYPE_VIDEO, null,
-       mockDataSource, EVALUATOR, AVAILABILITY_CLOCK, liveEdgeLatencyMs * 1000,
+   DashChunkSource chunkSource = new DashChunkSource(mockManifestFetcher, manifest,
|
AdaptationSet.TYPE_VIDEO, null, mockDataSource, EVALUATOR,
|
||||||
AVAILABILITY_REALTIME_OFFSET * 1000);
|
new FakeClock(AVAILABILITY_CURRENT_TIME_MS + startTime), liveEdgeLatencyMs * 1000,
|
||||||
|
AVAILABILITY_REALTIME_OFFSET_MS * 1000, null, null);
|
||||||
|
chunkSource.enable();
|
||||||
|
return chunkSource;
|
||||||
}
|
}
|
||||||
|
|
||||||
private DashChunkSource setupLiveEdgeTemplateTest(long liveEdgeLatencyMs) {
|
private DashChunkSource setupLiveEdgeTemplateTest(long startTime, long liveEdgeLatencyMs,
|
||||||
MediaPresentationDescription manifest = generateLiveMpdWithTemplate();
|
boolean limitTimeshiftBuffer) {
|
||||||
|
MediaPresentationDescription manifest = generateLiveMpdWithTemplate(limitTimeshiftBuffer);
|
||||||
when(mockManifestFetcher.getManifest()).thenReturn(manifest);
|
when(mockManifestFetcher.getManifest()).thenReturn(manifest);
|
||||||
return new DashChunkSource(mockManifestFetcher, manifest, AdaptationSet.TYPE_VIDEO, null,
|
DashChunkSource chunkSource = new DashChunkSource(mockManifestFetcher, manifest,
|
||||||
mockDataSource, EVALUATOR, AVAILABILITY_CLOCK, liveEdgeLatencyMs * 1000,
|
AdaptationSet.TYPE_VIDEO, null, mockDataSource, EVALUATOR,
|
||||||
AVAILABILITY_REALTIME_OFFSET * 1000);
|
new FakeClock(AVAILABILITY_CURRENT_TIME_MS + startTime), liveEdgeLatencyMs * 1000,
|
||||||
|
AVAILABILITY_REALTIME_OFFSET_MS * 1000, null, null);
|
||||||
|
chunkSource.enable();
|
||||||
|
return chunkSource;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void checkLiveEdgeLatencyWithTimeline(long startTimeMs, long liveEdgeLatencyMs,
|
||||||
|
long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs, long chunkStartTimeMs,
|
||||||
|
long chunkEndTimeMs) {
|
||||||
|
DashChunkSource chunkSource = setupLiveEdgeTimelineTest(startTimeMs, liveEdgeLatencyMs);
|
||||||
|
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||||
|
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||||
|
chunkSource.getChunkOperation(queue, seekPositionMs * 1000, 0, out);
|
||||||
|
TimeRange seekRange = chunkSource.getSeekRange();
|
||||||
|
|
||||||
|
assertNotNull(out.chunk);
|
||||||
|
long[] seekRangeValuesUs = seekRange.getCurrentBoundsUs(null);
|
||||||
|
assertEquals(seekRangeStartMs * 1000, seekRangeValuesUs[0]);
|
||||||
|
assertEquals(seekRangeEndMs * 1000, seekRangeValuesUs[1]);
|
||||||
|
assertEquals(chunkStartTimeMs * 1000, ((MediaChunk) out.chunk).startTimeUs);
|
||||||
|
assertEquals(chunkEndTimeMs * 1000, ((MediaChunk) out.chunk).endTimeUs);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void checkLiveEdgeLatencyWithTemplate(long startTimeMs, long liveEdgeLatencyMs,
|
||||||
|
long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs, long chunkStartTimeMs,
|
||||||
|
long chunkEndTimeMs, boolean limitTimeshiftBuffer) {
|
||||||
|
DashChunkSource chunkSource = setupLiveEdgeTemplateTest(startTimeMs, liveEdgeLatencyMs,
|
||||||
|
limitTimeshiftBuffer);
|
||||||
|
List<MediaChunk> queue = new ArrayList<MediaChunk>();
|
||||||
|
ChunkOperationHolder out = new ChunkOperationHolder();
|
||||||
|
chunkSource.getChunkOperation(queue, seekPositionMs * 1000, 0, out);
|
||||||
|
TimeRange seekRange = chunkSource.getSeekRange();
|
||||||
|
|
||||||
|
assertNotNull(out.chunk);
|
||||||
|
long[] seekRangeValuesUs = seekRange.getCurrentBoundsUs(null);
|
||||||
|
assertEquals(seekRangeStartMs * 1000, seekRangeValuesUs[0]);
|
||||||
|
assertEquals(seekRangeEndMs * 1000, seekRangeValuesUs[1]);
|
||||||
|
assertEquals(chunkStartTimeMs * 1000, ((MediaChunk) out.chunk).startTimeUs);
|
||||||
|
assertEquals(chunkEndTimeMs * 1000, ((MediaChunk) out.chunk).endTimeUs);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(long startTimeMs,
|
||||||
|
long liveEdgeLatencyMs, long seekPositionMs, long seekRangeEndMs,
|
||||||
|
long chunkStartTimeMs, long chunkEndTimeMs) {
|
||||||
|
checkLiveEdgeLatencyWithTemplate(startTimeMs, liveEdgeLatencyMs, seekPositionMs, 0,
|
||||||
|
seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(long startTimeMs,
|
||||||
|
long liveEdgeLatencyMs, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
|
||||||
|
long chunkStartTimeMs, long chunkEndTimeMs) {
|
||||||
|
checkLiveEdgeLatencyWithTemplate(startTimeMs, liveEdgeLatencyMs, seekPositionMs,
|
||||||
|
seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
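For orientation, the new checkLiveEdgeLatency* helpers above all share one query-and-assert pattern against DashChunkSource. The sketch below isolates that pattern with comments, reusing only calls that appear in the diff and assuming the ExoPlayer 1.x package layout; the LiveEdgeProbe wrapper class is illustrative only and not part of the change.

```java
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
import com.google.android.exoplayer.chunk.MediaChunk;
import com.google.android.exoplayer.dash.DashChunkSource;

import java.util.ArrayList;
import java.util.List;

// Illustrative wrapper (not part of the change) around the query-and-assert
// pattern the new test helpers use.
final class LiveEdgeProbe {

  private LiveEdgeProbe() {}

  /**
   * Asks the source for the chunk to load at seekPositionMs and returns
   * {seek range start, seek range end, chunk start, chunk end} in microseconds.
   */
  static long[] probe(DashChunkSource chunkSource, long seekPositionMs) {
    List<MediaChunk> queue = new ArrayList<MediaChunk>();  // nothing buffered yet
    ChunkOperationHolder out = new ChunkOperationHolder();
    // Request the next chunk for the given playback position (in microseconds).
    chunkSource.getChunkOperation(queue, seekPositionMs * 1000, 0, out);

    // Read back the seekable window and the time bounds of the chosen chunk.
    TimeRange seekRange = chunkSource.getSeekRange();
    long[] boundsUs = seekRange.getCurrentBoundsUs(null);
    MediaChunk chunk = (MediaChunk) out.chunk;
    return new long[] {boundsUs[0], boundsUs[1], chunk.startTimeUs, chunk.endTimeUs};
  }

}
```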
@@ -59,13 +59,13 @@ public class WebvttParserTest extends InstrumentationTestCase {
     // test first cue
     assertEquals(startTimeUs, subtitle.getEventTime(0));
     assertEquals("This is the first subtitle.",
-        subtitle.getText(subtitle.getEventTime(0)));
+        subtitle.getCues(subtitle.getEventTime(0)).get(0).text.toString());
     assertEquals(startTimeUs + 1234000, subtitle.getEventTime(1));
 
     // test second cue
     assertEquals(startTimeUs + 2345000, subtitle.getEventTime(2));
     assertEquals("This is the second subtitle.",
-        subtitle.getText(subtitle.getEventTime(2)));
+        subtitle.getCues(subtitle.getEventTime(2)).get(0).text.toString());
     assertEquals(startTimeUs + 3456000, subtitle.getEventTime(3));
   }
 
@@ -84,13 +84,13 @@ public class WebvttParserTest extends InstrumentationTestCase {
     // test first cue
     assertEquals(startTimeUs, subtitle.getEventTime(0));
     assertEquals("This is the first subtitle.",
-        subtitle.getText(subtitle.getEventTime(0)));
+        subtitle.getCues(subtitle.getEventTime(0)).get(0).text.toString());
     assertEquals(startTimeUs + 1234000, subtitle.getEventTime(1));
 
     // test second cue
     assertEquals(startTimeUs + 2345000, subtitle.getEventTime(2));
     assertEquals("This is the second subtitle.",
-        subtitle.getText(subtitle.getEventTime(2)));
+        subtitle.getCues(subtitle.getEventTime(2)).get(0).text.toString());
     assertEquals(startTimeUs + 3456000, subtitle.getEventTime(3));
   }
 
@@ -109,25 +109,25 @@ public class WebvttParserTest extends InstrumentationTestCase {
     // test first cue
     assertEquals(startTimeUs, subtitle.getEventTime(0));
     assertEquals("This is the first subtitle.",
-        subtitle.getText(subtitle.getEventTime(0)));
+        subtitle.getCues(subtitle.getEventTime(0)).get(0).text.toString());
     assertEquals(startTimeUs + 1234000, subtitle.getEventTime(1));
 
     // test second cue
     assertEquals(startTimeUs + 2345000, subtitle.getEventTime(2));
     assertEquals("This is the second subtitle.",
-        subtitle.getText(subtitle.getEventTime(2)));
+        subtitle.getCues(subtitle.getEventTime(2)).get(0).text.toString());
     assertEquals(startTimeUs + 3456000, subtitle.getEventTime(3));
 
     // test third cue
     assertEquals(startTimeUs + 4000000, subtitle.getEventTime(4));
     assertEquals("This is the third subtitle.",
-        subtitle.getText(subtitle.getEventTime(4)));
+        subtitle.getCues(subtitle.getEventTime(4)).get(0).text.toString());
     assertEquals(startTimeUs + 5000000, subtitle.getEventTime(5));
 
     // test fourth cue
     assertEquals(startTimeUs + 6000000, subtitle.getEventTime(6));
     assertEquals("This is the <fourth> &subtitle.",
-        subtitle.getText(subtitle.getEventTime(6)));
+        subtitle.getCues(subtitle.getEventTime(6)).get(0).text.toString());
     assertEquals(startTimeUs + 7000000, subtitle.getEventTime(7));
   }
 
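The WebvttParserTest changes above track the move from the old plain-text lookup (subtitle.getText(timeUs)) to cue-based lookup (subtitle.getCues(timeUs)), where each event time yields a list of cues rather than a single string. A minimal sketch of that lookup, assuming the ExoPlayer text classes shown in the diff; the CueTextLookup helper and its method name are illustrative only.

```java
import com.google.android.exoplayer.text.Cue;
import com.google.android.exoplayer.text.webvtt.WebvttSubtitle;

import java.util.List;

// Illustrative helper (not part of the change): cue-based lookup replacing the
// old subtitle.getText(timeUs) call.
final class CueTextLookup {

  private CueTextLookup() {}

  /** Returns the text of the first cue showing at timeUs, or null if none is showing. */
  static String firstCueText(WebvttSubtitle subtitle, long timeUs) {
    List<Cue> cues = subtitle.getCues(timeUs);
    return cues.isEmpty() ? null : cues.get(0).text.toString();
  }

}
```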
@@ -15,8 +15,13 @@
  */
 package com.google.android.exoplayer.text.webvtt;
 
+import com.google.android.exoplayer.text.Cue;
+
 import junit.framework.TestCase;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * Unit test for {@link WebvttSubtitle}.
  */
@@ -25,21 +30,39 @@ public class WebvttSubtitleTest extends TestCase {
   private static final String FIRST_SUBTITLE_STRING = "This is the first subtitle.";
   private static final String SECOND_SUBTITLE_STRING = "This is the second subtitle.";
   private static final String FIRST_AND_SECOND_SUBTITLE_STRING =
-      FIRST_SUBTITLE_STRING + SECOND_SUBTITLE_STRING;
+      FIRST_SUBTITLE_STRING + "\n" + SECOND_SUBTITLE_STRING;
 
-  private WebvttSubtitle emptySubtitle = new WebvttSubtitle(new String[] {}, 0, new long[] {});
+  private WebvttSubtitle emptySubtitle = new WebvttSubtitle(new ArrayList<WebvttCue>(), 0);
 
-  private WebvttSubtitle simpleSubtitle = new WebvttSubtitle(
-      new String[] {FIRST_SUBTITLE_STRING, SECOND_SUBTITLE_STRING}, 0,
-      new long[] {1000000, 2000000, 3000000, 4000000});
+  private ArrayList<WebvttCue> simpleSubtitleCues = new ArrayList<WebvttCue>();
+  {
+    WebvttCue firstCue = new WebvttCue(1000000, 2000000, FIRST_SUBTITLE_STRING);
+    simpleSubtitleCues.add(firstCue);
 
-  private WebvttSubtitle overlappingSubtitle = new WebvttSubtitle(
-      new String[] {FIRST_SUBTITLE_STRING, SECOND_SUBTITLE_STRING}, 0,
-      new long[] {1000000, 3000000, 2000000, 4000000});
+    WebvttCue secondCue = new WebvttCue(3000000, 4000000, SECOND_SUBTITLE_STRING);
+    simpleSubtitleCues.add(secondCue);
+  }
+  private WebvttSubtitle simpleSubtitle = new WebvttSubtitle(simpleSubtitleCues, 0);
 
-  private WebvttSubtitle nestedSubtitle = new WebvttSubtitle(
-      new String[] {FIRST_SUBTITLE_STRING, SECOND_SUBTITLE_STRING}, 0,
-      new long[] {1000000, 4000000, 2000000, 3000000});
+  private ArrayList<WebvttCue> overlappingSubtitleCues = new ArrayList<WebvttCue>();
+  {
+    WebvttCue firstCue = new WebvttCue(1000000, 3000000, FIRST_SUBTITLE_STRING);
+    overlappingSubtitleCues.add(firstCue);
 
+    WebvttCue secondCue = new WebvttCue(2000000, 4000000, SECOND_SUBTITLE_STRING);
+    overlappingSubtitleCues.add(secondCue);
+  }
+  private WebvttSubtitle overlappingSubtitle = new WebvttSubtitle(overlappingSubtitleCues, 0);
+
+  private ArrayList<WebvttCue> nestedSubtitleCues = new ArrayList<WebvttCue>();
+  {
+    WebvttCue firstCue = new WebvttCue(1000000, 4000000, FIRST_SUBTITLE_STRING);
+    nestedSubtitleCues.add(firstCue);
+
+    WebvttCue secondCue = new WebvttCue(2000000, 3000000, SECOND_SUBTITLE_STRING);
+    nestedSubtitleCues.add(secondCue);
+  }
+  private WebvttSubtitle nestedSubtitle = new WebvttSubtitle(nestedSubtitleCues, 0);
 
   public void testEventCount() {
     assertEquals(0, emptySubtitle.getEventTimeCount());
@@ -72,29 +95,29 @@ public class WebvttSubtitleTest extends TestCase {
 
   public void testSimpleSubtitleText() {
     // Test before first subtitle
-    assertNull(simpleSubtitle.getText(0));
-    assertNull(simpleSubtitle.getText(500000));
-    assertNull(simpleSubtitle.getText(999999));
+    assertSingleCueEmpty(simpleSubtitle.getCues(0));
+    assertSingleCueEmpty(simpleSubtitle.getCues(500000));
+    assertSingleCueEmpty(simpleSubtitle.getCues(999999));
 
     // Test first subtitle
-    assertEquals(FIRST_SUBTITLE_STRING, simpleSubtitle.getText(1000000));
-    assertEquals(FIRST_SUBTITLE_STRING, simpleSubtitle.getText(1500000));
-    assertEquals(FIRST_SUBTITLE_STRING, simpleSubtitle.getText(1999999));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, simpleSubtitle.getCues(1000000));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, simpleSubtitle.getCues(1500000));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, simpleSubtitle.getCues(1999999));
 
     // Test after first subtitle, before second subtitle
-    assertNull(simpleSubtitle.getText(2000000));
-    assertNull(simpleSubtitle.getText(2500000));
-    assertNull(simpleSubtitle.getText(2999999));
+    assertSingleCueEmpty(simpleSubtitle.getCues(2000000));
+    assertSingleCueEmpty(simpleSubtitle.getCues(2500000));
+    assertSingleCueEmpty(simpleSubtitle.getCues(2999999));
 
     // Test second subtitle
-    assertEquals(SECOND_SUBTITLE_STRING, simpleSubtitle.getText(3000000));
-    assertEquals(SECOND_SUBTITLE_STRING, simpleSubtitle.getText(3500000));
-    assertEquals(SECOND_SUBTITLE_STRING, simpleSubtitle.getText(3999999));
+    assertSingleCueTextEquals(SECOND_SUBTITLE_STRING, simpleSubtitle.getCues(3000000));
+    assertSingleCueTextEquals(SECOND_SUBTITLE_STRING, simpleSubtitle.getCues(3500000));
+    assertSingleCueTextEquals(SECOND_SUBTITLE_STRING, simpleSubtitle.getCues(3999999));
 
     // Test after second subtitle
-    assertNull(simpleSubtitle.getText(4000000));
-    assertNull(simpleSubtitle.getText(4500000));
-    assertNull(simpleSubtitle.getText(Long.MAX_VALUE));
+    assertSingleCueEmpty(simpleSubtitle.getCues(4000000));
+    assertSingleCueEmpty(simpleSubtitle.getCues(4500000));
+    assertSingleCueEmpty(simpleSubtitle.getCues(Long.MAX_VALUE));
   }
 
   public void testOverlappingSubtitleEventTimes() {
@@ -107,29 +130,32 @@ public class WebvttSubtitleTest extends TestCase {
 
   public void testOverlappingSubtitleText() {
     // Test before first subtitle
-    assertNull(overlappingSubtitle.getText(0));
-    assertNull(overlappingSubtitle.getText(500000));
-    assertNull(overlappingSubtitle.getText(999999));
+    assertSingleCueEmpty(overlappingSubtitle.getCues(0));
+    assertSingleCueEmpty(overlappingSubtitle.getCues(500000));
+    assertSingleCueEmpty(overlappingSubtitle.getCues(999999));
 
     // Test first subtitle
-    assertEquals(FIRST_SUBTITLE_STRING, overlappingSubtitle.getText(1000000));
-    assertEquals(FIRST_SUBTITLE_STRING, overlappingSubtitle.getText(1500000));
-    assertEquals(FIRST_SUBTITLE_STRING, overlappingSubtitle.getText(1999999));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, overlappingSubtitle.getCues(1000000));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, overlappingSubtitle.getCues(1500000));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, overlappingSubtitle.getCues(1999999));
 
     // Test after first and second subtitle
-    assertEquals(FIRST_AND_SECOND_SUBTITLE_STRING, overlappingSubtitle.getText(2000000));
-    assertEquals(FIRST_AND_SECOND_SUBTITLE_STRING, overlappingSubtitle.getText(2500000));
-    assertEquals(FIRST_AND_SECOND_SUBTITLE_STRING, overlappingSubtitle.getText(2999999));
+    assertSingleCueTextEquals(FIRST_AND_SECOND_SUBTITLE_STRING,
+        overlappingSubtitle.getCues(2000000));
+    assertSingleCueTextEquals(FIRST_AND_SECOND_SUBTITLE_STRING,
+        overlappingSubtitle.getCues(2500000));
+    assertSingleCueTextEquals(FIRST_AND_SECOND_SUBTITLE_STRING,
+        overlappingSubtitle.getCues(2999999));
 
     // Test second subtitle
-    assertEquals(SECOND_SUBTITLE_STRING, overlappingSubtitle.getText(3000000));
-    assertEquals(SECOND_SUBTITLE_STRING, overlappingSubtitle.getText(3500000));
-    assertEquals(SECOND_SUBTITLE_STRING, overlappingSubtitle.getText(3999999));
+    assertSingleCueTextEquals(SECOND_SUBTITLE_STRING, overlappingSubtitle.getCues(3000000));
+    assertSingleCueTextEquals(SECOND_SUBTITLE_STRING, overlappingSubtitle.getCues(3500000));
+    assertSingleCueTextEquals(SECOND_SUBTITLE_STRING, overlappingSubtitle.getCues(3999999));
 
     // Test after second subtitle
-    assertNull(overlappingSubtitle.getText(4000000));
-    assertNull(overlappingSubtitle.getText(4500000));
-    assertNull(overlappingSubtitle.getText(Long.MAX_VALUE));
+    assertSingleCueEmpty(overlappingSubtitle.getCues(4000000));
+    assertSingleCueEmpty(overlappingSubtitle.getCues(4500000));
+    assertSingleCueEmpty(overlappingSubtitle.getCues(Long.MAX_VALUE));
   }
 
   public void testNestedSubtitleEventTimes() {
@@ -142,29 +168,29 @@ public class WebvttSubtitleTest extends TestCase {
 
   public void testNestedSubtitleText() {
     // Test before first subtitle
-    assertNull(nestedSubtitle.getText(0));
-    assertNull(nestedSubtitle.getText(500000));
-    assertNull(nestedSubtitle.getText(999999));
+    assertSingleCueEmpty(nestedSubtitle.getCues(0));
+    assertSingleCueEmpty(nestedSubtitle.getCues(500000));
+    assertSingleCueEmpty(nestedSubtitle.getCues(999999));
 
     // Test first subtitle
-    assertEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getText(1000000));
-    assertEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getText(1500000));
-    assertEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getText(1999999));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getCues(1000000));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getCues(1500000));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getCues(1999999));
 
     // Test after first and second subtitle
-    assertEquals(FIRST_AND_SECOND_SUBTITLE_STRING, nestedSubtitle.getText(2000000));
-    assertEquals(FIRST_AND_SECOND_SUBTITLE_STRING, nestedSubtitle.getText(2500000));
-    assertEquals(FIRST_AND_SECOND_SUBTITLE_STRING, nestedSubtitle.getText(2999999));
+    assertSingleCueTextEquals(FIRST_AND_SECOND_SUBTITLE_STRING, nestedSubtitle.getCues(2000000));
+    assertSingleCueTextEquals(FIRST_AND_SECOND_SUBTITLE_STRING, nestedSubtitle.getCues(2500000));
+    assertSingleCueTextEquals(FIRST_AND_SECOND_SUBTITLE_STRING, nestedSubtitle.getCues(2999999));
 
     // Test first subtitle
-    assertEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getText(3000000));
-    assertEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getText(3500000));
-    assertEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getText(3999999));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getCues(3000000));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getCues(3500000));
+    assertSingleCueTextEquals(FIRST_SUBTITLE_STRING, nestedSubtitle.getCues(3999999));
 
     // Test after second subtitle
-    assertNull(nestedSubtitle.getText(4000000));
-    assertNull(nestedSubtitle.getText(4500000));
-    assertNull(nestedSubtitle.getText(Long.MAX_VALUE));
+    assertSingleCueEmpty(nestedSubtitle.getCues(4000000));
+    assertSingleCueEmpty(nestedSubtitle.getCues(4500000));
+    assertSingleCueEmpty(nestedSubtitle.getCues(Long.MAX_VALUE));
   }
 
   private void testSubtitleEventTimesHelper(WebvttSubtitle subtitle) {
@@ -201,4 +227,13 @@ public class WebvttSubtitleTest extends TestCase {
     assertEquals(-1, subtitle.getNextEventTimeIndex(Long.MAX_VALUE));
   }
 
+  private void assertSingleCueEmpty(List<Cue> cues) {
+    assertTrue(cues.size() == 0);
+  }
+
+  private void assertSingleCueTextEquals(String expected, List<Cue> cues) {
+    assertTrue(cues.size() == 1);
+    assertEquals(expected, cues.get(0).text.toString());
+  }
+
 }
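The WebvttSubtitleTest changes above replace the old parallel String[]/long[] constructor with WebvttCue objects that each carry their own start time, end time and text. A minimal sketch of the new construction path, assuming only the constructors used in the diff; the factory class and method name below are illustrative, and the second WebvttSubtitle argument mirrors the 0 offset the fixtures above pass.

```java
import com.google.android.exoplayer.text.webvtt.WebvttCue;
import com.google.android.exoplayer.text.webvtt.WebvttSubtitle;

import java.util.ArrayList;

// Illustrative factory (not part of the change) showing the cue-based
// construction used by the updated fixtures.
final class SampleSubtitleFactory {

  private SampleSubtitleFactory() {}

  /** Builds a subtitle with two non-overlapping cues; times are in microseconds. */
  static WebvttSubtitle twoCueSubtitle() {
    ArrayList<WebvttCue> cues = new ArrayList<WebvttCue>();
    cues.add(new WebvttCue(1000000, 2000000, "This is the first subtitle."));
    cues.add(new WebvttCue(3000000, 4000000, "This is the second subtitle."));
    return new WebvttSubtitle(cues, 0);
  }

}
```

Overlapping and nested cues are built the same way; only the start and end times of the WebvttCue instances differ, as in the overlappingSubtitleCues and nestedSubtitleCues fixtures above.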