Merge pull request #199 from google/dev

Release 1.1.0
This commit is contained in:
ojw28 2014-12-10 16:52:40 +00:00
commit 34372deb9a
107 changed files with 7917 additions and 2117 deletions

View File

@ -1,4 +1,4 @@
# How to contribute #
# How to Contribute #
We'd love to hear your feedback. Please open new issues describing any bugs,
feature requests or suggestions that you have.

View File

@ -4,7 +4,7 @@
ExoPlayer is an application level media player for Android. It provides an
alternative to Androids MediaPlayer API for playing audio and video both
locally and over the internet. ExoPlayer supports features not currently
locally and over the Internet. ExoPlayer supports features not currently
supported by Androids MediaPlayer API (as of KitKat), including DASH and
SmoothStreaming adaptive playbacks, persistent caching and custom renderers.
Unlike the MediaPlayer API, ExoPlayer is easy to customize and extend, and

View File

@ -19,7 +19,7 @@ buildscript {
mavenCentral()
}
dependencies {
classpath 'com.android.tools.build:gradle:0.12.+'
classpath 'com.android.tools.build:gradle:1.0.0-rc1'
}
}

View File

@ -11,19 +11,19 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
apply plugin: 'android'
apply plugin: 'com.android.application'
android {
compileSdkVersion 19
buildToolsVersion "19.1"
compileSdkVersion 21
buildToolsVersion "21.1.1"
defaultConfig {
minSdkVersion 16
targetSdkVersion 19
targetSdkVersion 21
}
buildTypes {
release {
runProguard false
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
}
}

View File

@ -4,7 +4,7 @@
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.DEPENDENCIES"/>
<classpathentry combineaccessrules="false" kind="src" path="/ExoPlayerLib"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="src" path="java"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@ -16,8 +16,8 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.google.android.exoplayer.demo"
android:versionCode="1013"
android:versionName="1.0.13"
android:versionCode="1100"
android:versionName="1.1.00"
android:theme="@style/RootTheme">
<uses-permission android:name="android.permission.INTERNET"/>
@ -25,11 +25,12 @@
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-sdk android:minSdkVersion="16" android:targetSdkVersion="19"/>
<uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21"/>
<application
android:label="@string/application_name"
android:allowBackup="true">
android:largeHeap="true"
android:allowBackup="false">
<activity android:name="com.google.android.exoplayer.demo.SampleChooserActivity"
android:configChanges="keyboardHidden"

View File

@ -28,6 +28,9 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.CookiePolicy;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
@ -44,12 +47,19 @@ public class DemoUtil {
public static final String CONTENT_TYPE_EXTRA = "content_type";
public static final String CONTENT_ID_EXTRA = "content_id";
public static final int TYPE_DASH_VOD = 0;
public static final int TYPE_SS_VOD = 1;
public static final int TYPE_DASH = 0;
public static final int TYPE_SS = 1;
public static final int TYPE_OTHER = 2;
public static final boolean EXPOSE_EXPERIMENTAL_FEATURES = false;
private static final CookieManager defaultCookieManager;
static {
defaultCookieManager = new CookieManager();
defaultCookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ORIGINAL_SERVER);
}
public static String getUserAgent(Context context) {
String versionName;
try {
@ -105,4 +115,11 @@ public class DemoUtil {
return bytes;
}
public static void setDefaultCookieManager() {
CookieHandler currentHandler = CookieHandler.getDefault();
if (currentHandler != defaultCookieManager) {
CookieHandler.setDefault(defaultCookieManager);
}
}
}

View File

@ -46,17 +46,17 @@ package com.google.android.exoplayer.demo;
"http://www.youtube.com/api/manifest/dash/id/bf5bb2419360daf1/source/youtube?"
+ "as=fmp4_audio_clear,fmp4_sd_hd_clear&sparams=ip,ipbits,expire,as&ip=0.0.0.0&"
+ "ipbits=0&expire=19000000000&signature=255F6B3C07C753C88708C07EA31B7A1A10703C8D."
+ "2D6A28B21F921D0B245CDCF36F7EB54A2B5ABFC2&key=ik0", DemoUtil.TYPE_DASH_VOD, false,
+ "2D6A28B21F921D0B245CDCF36F7EB54A2B5ABFC2&key=ik0", DemoUtil.TYPE_DASH, false,
false),
new Sample("Google Play (DASH)", "3aa39fa2cc27967f",
"http://www.youtube.com/api/manifest/dash/id/3aa39fa2cc27967f/source/youtube?"
+ "as=fmp4_audio_clear,fmp4_sd_hd_clear&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0&"
+ "expire=19000000000&signature=7181C59D0252B285D593E1B61D985D5B7C98DE2A."
+ "5B445837F55A40E0F28AACAA047982E372D177E2&key=ik0", DemoUtil.TYPE_DASH_VOD, false,
+ "5B445837F55A40E0F28AACAA047982E372D177E2&key=ik0", DemoUtil.TYPE_DASH, false,
false),
new Sample("Super speed (SmoothStreaming)", "uid:ss:superspeed",
"http://playready.directtaps.net/smoothstreaming/SSWSS720H264/SuperSpeedway_720.ism",
DemoUtil.TYPE_SS_VOD, false, false),
DemoUtil.TYPE_SS, false, false),
new Sample("Dizzy (Misc)", "uid:misc:dizzy",
"http://html5demos.com/assets/dizzy.mp4", DemoUtil.TYPE_OTHER, false, false),
};
@ -66,13 +66,13 @@ package com.google.android.exoplayer.demo;
"http://www.youtube.com/api/manifest/dash/id/bf5bb2419360daf1/source/youtube?"
+ "as=fmp4_audio_clear,fmp4_sd_hd_clear&sparams=ip,ipbits,expire,as&ip=0.0.0.0&"
+ "ipbits=0&expire=19000000000&signature=255F6B3C07C753C88708C07EA31B7A1A10703C8D."
+ "2D6A28B21F921D0B245CDCF36F7EB54A2B5ABFC2&key=ik0", DemoUtil.TYPE_DASH_VOD, false,
+ "2D6A28B21F921D0B245CDCF36F7EB54A2B5ABFC2&key=ik0", DemoUtil.TYPE_DASH, false,
true),
new Sample("Google Play", "3aa39fa2cc27967f",
"http://www.youtube.com/api/manifest/dash/id/3aa39fa2cc27967f/source/youtube?"
+ "as=fmp4_audio_clear,fmp4_sd_hd_clear&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0&"
+ "expire=19000000000&signature=7181C59D0252B285D593E1B61D985D5B7C98DE2A."
+ "5B445837F55A40E0F28AACAA047982E372D177E2&key=ik0", DemoUtil.TYPE_DASH_VOD, false,
+ "5B445837F55A40E0F28AACAA047982E372D177E2&key=ik0", DemoUtil.TYPE_DASH, false,
true),
};
@ -81,21 +81,21 @@ package com.google.android.exoplayer.demo;
"http://www.youtube.com/api/manifest/dash/id/bf5bb2419360daf1/source/youtube?"
+ "as=fmp4_audio_clear,webm2_sd_hd_clear&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0&"
+ "expire=19000000000&signature=A3EC7EE53ABE601B357F7CAB8B54AD0702CA85A7."
+ "446E9C38E47E3EDAF39E0163C390FF83A7944918&key=ik0", DemoUtil.TYPE_DASH_VOD, false, true),
+ "446E9C38E47E3EDAF39E0163C390FF83A7944918&key=ik0", DemoUtil.TYPE_DASH, false, true),
new Sample("Google Play", "3aa39fa2cc27967f",
"http://www.youtube.com/api/manifest/dash/id/3aa39fa2cc27967f/source/youtube?"
+ "as=fmp4_audio_clear,webm2_sd_hd_clear&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0&"
+ "expire=19000000000&signature=B752B262C6D7262EC4E4EB67901E5D8F7058A81D."
+ "C0358CE1E335417D9A8D88FF192F0D5D8F6DA1B6&key=ik0", DemoUtil.TYPE_DASH_VOD, false, true),
+ "C0358CE1E335417D9A8D88FF192F0D5D8F6DA1B6&key=ik0", DemoUtil.TYPE_DASH, false, true),
};
public static final Sample[] SMOOTHSTREAMING = new Sample[] {
new Sample("Super speed", "uid:ss:superspeed",
"http://playready.directtaps.net/smoothstreaming/SSWSS720H264/SuperSpeedway_720.ism",
DemoUtil.TYPE_SS_VOD, false, true),
DemoUtil.TYPE_SS, false, true),
new Sample("Super speed (PlayReady)", "uid:ss:pr:superspeed",
"http://playready.directtaps.net/smoothstreaming/SSWSS720H264PR/SuperSpeedway_720.ism",
DemoUtil.TYPE_SS_VOD, true, true),
DemoUtil.TYPE_SS, true, true),
};
public static final Sample[] WIDEVINE_GTS = new Sample[] {
@ -103,32 +103,32 @@ package com.google.android.exoplayer.demo;
"http://www.youtube.com/api/manifest/dash/id/d286538032258a1c/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0"
+ "&expire=19000000000&signature=41EA40A027A125A16292E0A5E3277A3B5FA9B938."
+ "0BB075C396FFDDC97E526E8F77DC26FF9667D0D6&key=ik0", DemoUtil.TYPE_DASH_VOD, true, true),
+ "0BB075C396FFDDC97E526E8F77DC26FF9667D0D6&key=ik0", DemoUtil.TYPE_DASH, true, true),
new Sample("WV: HDCP not required", "48fcc369939ac96c",
"http://www.youtube.com/api/manifest/dash/id/48fcc369939ac96c/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0"
+ "&expire=19000000000&signature=315911BDCEED0FB0C763455BDCC97449DAAFA9E8."
+ "5B41E2EB411F797097A359D6671D2CDE26272373&key=ik0", DemoUtil.TYPE_DASH_VOD, true, true),
+ "5B41E2EB411F797097A359D6671D2CDE26272373&key=ik0", DemoUtil.TYPE_DASH, true, true),
new Sample("WV: HDCP required", "e06c39f1151da3df",
"http://www.youtube.com/api/manifest/dash/id/e06c39f1151da3df/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0"
+ "&expire=19000000000&signature=A47A1E13E7243BD567601A75F79B34644D0DC592."
+ "B09589A34FA23527EFC1552907754BB8033870BD&key=ik0", DemoUtil.TYPE_DASH_VOD, true, true),
+ "B09589A34FA23527EFC1552907754BB8033870BD&key=ik0", DemoUtil.TYPE_DASH, true, true),
new Sample("WV: Secure video path required", "0894c7c8719b28a0",
"http://www.youtube.com/api/manifest/dash/id/0894c7c8719b28a0/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0"
+ "&expire=19000000000&signature=2847EE498970F6B45176766CD2802FEB4D4CB7B2."
+ "A1CA51EC40A1C1039BA800C41500DD448C03EEDA&key=ik0", DemoUtil.TYPE_DASH_VOD, true, true),
+ "A1CA51EC40A1C1039BA800C41500DD448C03EEDA&key=ik0", DemoUtil.TYPE_DASH, true, true),
new Sample("WV: HDCP + secure video path required", "efd045b1eb61888a",
"http://www.youtube.com/api/manifest/dash/id/efd045b1eb61888a/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0"
+ "&expire=19000000000&signature=61611F115EEEC7BADE5536827343FFFE2D83D14F."
+ "2FDF4BFA502FB5865C5C86401314BDDEA4799BD0&key=ik0", DemoUtil.TYPE_DASH_VOD, true, true),
+ "2FDF4BFA502FB5865C5C86401314BDDEA4799BD0&key=ik0", DemoUtil.TYPE_DASH, true, true),
new Sample("WV: 30s license duration", "f9a34cab7b05881a",
"http://www.youtube.com/api/manifest/dash/id/f9a34cab7b05881a/source/youtube?"
+ "as=fmp4_audio_cenc,fmp4_sd_hd_cenc&sparams=ip,ipbits,expire,as&ip=0.0.0.0&ipbits=0"
+ "&expire=19000000000&signature=88DC53943385CED8CF9F37ADD9E9843E3BF621E6."
+ "22727BB612D24AA4FACE4EF62726F9461A9BF57A&key=ik0", DemoUtil.TYPE_DASH_VOD, true, true),
+ "22727BB612D24AA4FACE4EF62726F9461A9BF57A&key=ik0", DemoUtil.TYPE_DASH, true, true),
};
public static final Sample[] MISC = new Sample[] {
@ -136,6 +136,9 @@ package com.google.android.exoplayer.demo;
DemoUtil.TYPE_OTHER, false, true),
new Sample("Dizzy (https->http redirect)", "uid:misc:dizzy2", "https://goo.gl/MtUDEj",
DemoUtil.TYPE_OTHER, false, true),
new Sample("Apple AAC 10s", "uid:misc:appleaacseg", "https://devimages.apple.com.edgekey.net/"
+ "streaming/examples/bipbop_4x3/gear0/fileSequence0.aac",
DemoUtil.TYPE_OTHER, false, true),
};
private Samples() {}

View File

@ -16,8 +16,8 @@
package com.google.android.exoplayer.demo.full;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer.AudioTrackInitializationException;
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.demo.full.player.DemoPlayer;
import com.google.android.exoplayer.util.VerboseLogUtil;
@ -73,8 +73,8 @@ public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener
}
@Override
public void onVideoSizeChanged(int width, int height) {
Log.d(TAG, "videoSizeChanged [" + width + ", " + height + "]");
public void onVideoSizeChanged(int width, int height, float pixelWidthHeightRatio) {
Log.d(TAG, "videoSizeChanged [" + width + ", " + height + ", " + pixelWidthHeightRatio + "]");
}
// DemoPlayer.InfoListener
@ -149,7 +149,7 @@ public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener
}
@Override
public void onAudioTrackInitializationError(AudioTrackInitializationException e) {
public void onAudioTrackInitializationError(AudioTrack.InitializationException e) {
printInternalError("audioTrackInitializationError", e);
}

View File

@ -19,18 +19,25 @@ import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.VideoSurfaceView;
import com.google.android.exoplayer.demo.DemoUtil;
import com.google.android.exoplayer.demo.R;
import com.google.android.exoplayer.demo.full.player.DashVodRendererBuilder;
import com.google.android.exoplayer.demo.full.player.DashRendererBuilder;
import com.google.android.exoplayer.demo.full.player.DefaultRendererBuilder;
import com.google.android.exoplayer.demo.full.player.DemoPlayer;
import com.google.android.exoplayer.demo.full.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.full.player.SmoothStreamingRendererBuilder;
import com.google.android.exoplayer.text.CaptionStyleCompat;
import com.google.android.exoplayer.text.SubtitleView;
import com.google.android.exoplayer.util.Util;
import com.google.android.exoplayer.util.VerboseLogUtil;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Point;
import android.net.Uri;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.Display;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
@ -38,6 +45,8 @@ import android.view.SurfaceHolder;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
import android.view.accessibility.CaptioningManager;
import android.widget.Button;
import android.widget.MediaController;
import android.widget.PopupMenu;
@ -50,6 +59,7 @@ import android.widget.TextView;
public class FullPlayerActivity extends Activity implements SurfaceHolder.Callback, OnClickListener,
DemoPlayer.Listener, DemoPlayer.TextListener {
private static final float CAPTION_LINE_HEIGHT_RATIO = 0.0533f;
private static final int MENU_GROUP_TRACKS = 1;
private static final int ID_OFFSET = 2;
@ -60,7 +70,7 @@ public class FullPlayerActivity extends Activity implements SurfaceHolder.Callba
private VideoSurfaceView surfaceView;
private TextView debugTextView;
private TextView playerStateTextView;
private TextView subtitlesTextView;
private SubtitleView subtitleView;
private Button videoButton;
private Button audioButton;
private Button textButton;
@ -70,7 +80,7 @@ public class FullPlayerActivity extends Activity implements SurfaceHolder.Callba
private boolean playerNeedsPrepare;
private boolean autoPlay = true;
private int playerPosition;
private long playerPosition;
private boolean enableBackgroundAudio = false;
private Uri contentUri;
@ -108,7 +118,7 @@ public class FullPlayerActivity extends Activity implements SurfaceHolder.Callba
debugTextView = (TextView) findViewById(R.id.debug_text_view);
playerStateTextView = (TextView) findViewById(R.id.player_state_view);
subtitlesTextView = (TextView) findViewById(R.id.subtitles);
subtitleView = (SubtitleView) findViewById(R.id.subtitles);
mediaController = new MediaController(this);
mediaController.setAnchorView(root);
@ -117,11 +127,14 @@ public class FullPlayerActivity extends Activity implements SurfaceHolder.Callba
videoButton = (Button) findViewById(R.id.video_controls);
audioButton = (Button) findViewById(R.id.audio_controls);
textButton = (Button) findViewById(R.id.text_controls);
DemoUtil.setDefaultCookieManager();
}
@Override
public void onResume() {
super.onResume();
configureSubtitleView();
preparePlayer();
}
@ -156,11 +169,11 @@ public class FullPlayerActivity extends Activity implements SurfaceHolder.Callba
private RendererBuilder getRendererBuilder() {
String userAgent = DemoUtil.getUserAgent(this);
switch (contentType) {
case DemoUtil.TYPE_SS_VOD:
case DemoUtil.TYPE_SS:
return new SmoothStreamingRendererBuilder(userAgent, contentUri.toString(), contentId,
new SmoothStreamingTestMediaDrmCallback(), debugTextView);
case DemoUtil.TYPE_DASH_VOD:
return new DashVodRendererBuilder(userAgent, contentUri.toString(), contentId,
case DemoUtil.TYPE_DASH:
return new DashRendererBuilder(userAgent, contentUri.toString(), contentId,
new WidevineTestMediaDrmCallback(contentId), debugTextView);
default:
return new DefaultRendererBuilder(this, contentUri, debugTextView);
@ -249,9 +262,10 @@ public class FullPlayerActivity extends Activity implements SurfaceHolder.Callba
}
@Override
public void onVideoSizeChanged(int width, int height) {
public void onVideoSizeChanged(int width, int height, float pixelWidthAspectRatio) {
shutterView.setVisibility(View.GONE);
surfaceView.setVideoWidthHeightRatio(height == 0 ? 1 : (float) width / height);
surfaceView.setVideoWidthHeightRatio(
height == 0 ? 1 : (width * pixelWidthAspectRatio) / height);
}
// User controls
@ -380,10 +394,10 @@ public class FullPlayerActivity extends Activity implements SurfaceHolder.Callba
@Override
public void onText(String text) {
if (TextUtils.isEmpty(text)) {
subtitlesTextView.setVisibility(View.INVISIBLE);
subtitleView.setVisibility(View.INVISIBLE);
} else {
subtitlesTextView.setVisibility(View.VISIBLE);
subtitlesTextView.setText(text);
subtitleView.setVisibility(View.VISIBLE);
subtitleView.setText(text);
}
}
@ -409,4 +423,40 @@ public class FullPlayerActivity extends Activity implements SurfaceHolder.Callba
}
}
private void configureSubtitleView() {
CaptionStyleCompat captionStyle;
float captionTextSize = getCaptionFontSize();
if (Util.SDK_INT >= 19) {
captionStyle = getUserCaptionStyleV19();
captionTextSize *= getUserCaptionFontScaleV19();
} else {
captionStyle = CaptionStyleCompat.DEFAULT;
}
subtitleView.setStyle(captionStyle);
subtitleView.setTextSize(captionTextSize);
}
private float getCaptionFontSize() {
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE))
.getDefaultDisplay();
Point displaySize = new Point();
display.getSize(displaySize);
return Math.max(getResources().getDimension(R.dimen.subtitle_minimum_font_size),
CAPTION_LINE_HEIGHT_RATIO * Math.min(displaySize.x, displaySize.y));
}
@TargetApi(19)
private float getUserCaptionFontScaleV19() {
CaptioningManager captioningManager =
(CaptioningManager) getSystemService(Context.CAPTIONING_SERVICE);
return captioningManager.getFontScale();
}
@TargetApi(19)
private CaptionStyleCompat getUserCaptionStyleV19() {
CaptioningManager captioningManager =
(CaptioningManager) getSystemService(Context.CAPTIONING_SERVICE);
return CaptionStyleCompat.createFromCaptionStyle(captioningManager.getUserStyle());
}
}

View File

@ -31,7 +31,7 @@ import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionFetcher;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser;
import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.dash.mpd.Representation;
import com.google.android.exoplayer.demo.DemoUtil;
@ -40,10 +40,13 @@ import com.google.android.exoplayer.demo.full.player.DemoPlayer.RendererBuilderC
import com.google.android.exoplayer.drm.DrmSessionManager;
import com.google.android.exoplayer.drm.MediaDrmCallback;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
import com.google.android.exoplayer.text.TextTrackRenderer;
import com.google.android.exoplayer.text.webvtt.WebvttParser;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer.upstream.HttpDataSource;
import com.google.android.exoplayer.upstream.UriDataSource;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.ManifestFetcher.ManifestCallback;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
@ -51,22 +54,25 @@ import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.UnsupportedSchemeException;
import android.os.AsyncTask;
import android.os.Handler;
import android.util.Pair;
import android.widget.TextView;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* A {@link RendererBuilder} for DASH VOD.
* A {@link RendererBuilder} for DASH.
*/
public class DashVodRendererBuilder implements RendererBuilder,
public class DashRendererBuilder implements RendererBuilder,
ManifestCallback<MediaPresentationDescription> {
private static final int BUFFER_SEGMENT_SIZE = 64 * 1024;
private static final int VIDEO_BUFFER_SEGMENTS = 200;
private static final int AUDIO_BUFFER_SEGMENTS = 60;
private static final int TEXT_BUFFER_SEGMENTS = 2;
private static final int LIVE_EDGE_LATENCY_MS = 30000;
private static final int SECURITY_LEVEL_UNKNOWN = -1;
private static final int SECURITY_LEVEL_1 = 1;
@ -80,8 +86,9 @@ public class DashVodRendererBuilder implements RendererBuilder,
private DemoPlayer player;
private RendererBuilderCallback callback;
private ManifestFetcher<MediaPresentationDescription> manifestFetcher;
public DashVodRendererBuilder(String userAgent, String url, String contentId,
public DashRendererBuilder(String userAgent, String url, String contentId,
MediaDrmCallback drmCallback, TextView debugTextView) {
this.userAgent = userAgent;
this.url = url;
@ -94,49 +101,30 @@ public class DashVodRendererBuilder implements RendererBuilder,
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
this.player = player;
this.callback = callback;
MediaPresentationDescriptionFetcher mpdFetcher = new MediaPresentationDescriptionFetcher(this);
mpdFetcher.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, url, contentId);
MediaPresentationDescriptionParser parser = new MediaPresentationDescriptionParser();
manifestFetcher = new ManifestFetcher<MediaPresentationDescription>(parser, contentId, url,
userAgent);
manifestFetcher.singleLoad(player.getMainHandler().getLooper(), this);
}
@Override
public void onManifestError(String contentId, Exception e) {
public void onManifestError(String contentId, IOException e) {
callback.onRenderersError(e);
}
@Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Period period = manifest.periods.get(0);
Handler mainHandler = player.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
// Obtain Representations for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
ArrayList<Representation> audioRepresentationsList = new ArrayList<Representation>();
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
boolean hasContentProtection = false;
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
hasContentProtection |= adaptationSet.hasContentProtection();
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentationsList.add(representation);
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
int videoAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_VIDEO);
AdaptationSet videoAdaptationSet = period.adaptationSets.get(videoAdaptationSetIndex);
// Check drm support if necessary.
boolean hasContentProtection = videoAdaptationSet.hasContentProtection();
boolean filterHdContent = false;
DrmSessionManager drmSessionManager = null;
if (hasContentProtection) {
if (Util.SDK_INT < 18) {
@ -148,55 +136,81 @@ public class DashVodRendererBuilder implements RendererBuilder,
Pair<DrmSessionManager, Boolean> drmSessionManagerData =
V18Compat.getDrmSessionManagerData(player, drmCallback);
drmSessionManager = drmSessionManagerData.first;
if (!drmSessionManagerData.second) {
// HD streams require L1 security.
videoRepresentations = getSdRepresentations(videoRepresentations);
}
// HD streams require L1 security.
filterHdContent = !drmSessionManagerData.second;
} catch (Exception e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource;
String mimeType = videoRepresentations[0].format.mimeType;
if (mimeType.equals(MimeTypes.VIDEO_MP4) || mimeType.equals(MimeTypes.VIDEO_WEBM)) {
videoChunkSource = new DashChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
} else {
throw new IllegalStateException("Unexpected mime type: " + mimeType);
// Determine which video representations we should use for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
List<Representation> videoRepresentations = videoAdaptationSet.representations;
ArrayList<Integer> videoRepresentationIndexList = new ArrayList<Integer>();
for (int i = 0; i < videoRepresentations.size(); i++) {
Format format = videoRepresentations.get(i).format;
if (filterHdContent && (format.width >= 1280 || format.height >= 720)) {
// Filtering HD content
} else if (format.width * format.height > maxDecodableFrameSize) {
// Filtering stream that device cannot play
} else if (!format.mimeType.equals(MimeTypes.VIDEO_MP4)
&& !format.mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// Filtering unsupported mime type
} else {
videoRepresentationIndexList.add(i);
}
}
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
final TrackRenderer debugRenderer;
if (videoRepresentationIndexList.isEmpty()) {
videoRenderer = null;
debugRenderer = null;
} else {
int[] videoRepresentationIndices = Util.toArray(videoRepresentationIndexList);
DataSource videoDataSource = new UriDataSource(userAgent, bandwidthMeter);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, videoAdaptationSetIndex,
videoRepresentationIndices, videoDataSource, new AdaptiveEvaluator(bandwidthMeter),
LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource, drmSessionManager, true,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50);
debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
}
// Build the audio chunk sources.
int audioAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_AUDIO);
AdaptationSet audioAdaptationSet = period.adaptationSets.get(audioAdaptationSetIndex);
DataSource audioDataSource = new UriDataSource(userAgent, bandwidthMeter);
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
List<ChunkSource> audioChunkSourceList = new ArrayList<ChunkSource>();
List<String> audioTrackNameList = new ArrayList<String>();
List<Representation> audioRepresentations = audioAdaptationSet.representations;
for (int i = 0; i < audioRepresentations.size(); i++) {
Format format = audioRepresentations.get(i).format;
audioTrackNameList.add(format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)");
audioChunkSourceList.add(new DashChunkSource(manifestFetcher, audioAdaptationSetIndex,
new int[] {i}, audioDataSource, audioEvaluator, LIVE_EDGE_LATENCY_MS));
}
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
mainHandler, player, 50);
// Build the audio renderer.
final String[] audioTrackNames;
final MultiTrackChunkSource audioChunkSource;
final MediaCodecAudioTrackRenderer audioRenderer;
if (audioRepresentationsList.isEmpty()) {
final TrackRenderer audioRenderer;
if (audioChunkSourceList.isEmpty()) {
audioTrackNames = null;
audioChunkSource = null;
audioRenderer = null;
} else {
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
audioTrackNames = new String[audioRepresentationsList.size()];
ChunkSource[] audioChunkSources = new ChunkSource[audioRepresentationsList.size()];
FormatEvaluator audioEvaluator = new FormatEvaluator.FixedEvaluator();
for (int i = 0; i < audioRepresentationsList.size(); i++) {
Representation representation = audioRepresentationsList.get(i);
Format format = representation.format;
audioTrackNames[i] = format.id + " (" + format.numChannels + "ch, " +
format.audioSamplingRate + "Hz)";
audioChunkSources[i] = new DashChunkSource(audioDataSource,
audioEvaluator, representation);
}
audioChunkSource = new MultiTrackChunkSource(audioChunkSources);
audioTrackNames = new String[audioTrackNameList.size()];
audioTrackNameList.toArray(audioTrackNames);
audioChunkSource = new MultiTrackChunkSource(audioChunkSourceList);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_AUDIO);
@ -204,45 +218,69 @@ public class DashVodRendererBuilder implements RendererBuilder,
mainHandler, player);
}
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer, videoSampleSource) : null;
// Build the text chunk sources.
DataSource textDataSource = new UriDataSource(userAgent, bandwidthMeter);
FormatEvaluator textEvaluator = new FormatEvaluator.FixedEvaluator();
List<ChunkSource> textChunkSourceList = new ArrayList<ChunkSource>();
List<String> textTrackNameList = new ArrayList<String>();
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
if (adaptationSet.type == AdaptationSet.TYPE_TEXT) {
List<Representation> representations = adaptationSet.representations;
for (int j = 0; j < representations.size(); j++) {
Representation representation = representations.get(j);
textTrackNameList.add(representation.format.id);
textChunkSourceList.add(new DashChunkSource(manifestFetcher, i, new int[] {j},
textDataSource, textEvaluator, LIVE_EDGE_LATENCY_MS));
}
}
}
// Build the text renderers
final String[] textTrackNames;
final MultiTrackChunkSource textChunkSource;
final TrackRenderer textRenderer;
if (textChunkSourceList.isEmpty()) {
textTrackNames = null;
textChunkSource = null;
textRenderer = null;
} else {
textTrackNames = new String[textTrackNameList.size()];
textTrackNameList.toArray(textTrackNames);
textChunkSource = new MultiTrackChunkSource(textChunkSourceList);
SampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_TEXT);
textRenderer = new TextTrackRenderer(textSampleSource, new WebvttParser(), player,
mainHandler.getLooper());
}
// Invoke the callback.
String[][] trackNames = new String[DemoPlayer.RENDERER_COUNT][];
trackNames[DemoPlayer.TYPE_AUDIO] = audioTrackNames;
trackNames[DemoPlayer.TYPE_TEXT] = textTrackNames;
MultiTrackChunkSource[] multiTrackChunkSources =
new MultiTrackChunkSource[DemoPlayer.RENDERER_COUNT];
multiTrackChunkSources[DemoPlayer.TYPE_AUDIO] = audioChunkSource;
multiTrackChunkSources[DemoPlayer.TYPE_TEXT] = textChunkSource;
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
}
private Representation[] getSdRepresentations(Representation[] representations) {
ArrayList<Representation> sdRepresentations = new ArrayList<Representation>();
for (int i = 0; i < representations.length; i++) {
if (representations[i].format.height < 720 && representations[i].format.width < 1280) {
sdRepresentations.add(representations[i]);
}
}
Representation[] sdRepresentationArray = new Representation[sdRepresentations.size()];
sdRepresentations.toArray(sdRepresentationArray);
return sdRepresentationArray;
}
@TargetApi(18)
private static class V18Compat {
public static Pair<DrmSessionManager, Boolean> getDrmSessionManagerData(DemoPlayer player,
MediaDrmCallback drmCallback) throws UnsupportedSchemeException {
StreamingDrmSessionManager streamingDrmSessionManager = new StreamingDrmSessionManager(
DemoUtil.WIDEVINE_UUID, player.getPlaybackLooper(), drmCallback, player.getMainHandler(),
player);
DemoUtil.WIDEVINE_UUID, player.getPlaybackLooper(), drmCallback, null,
player.getMainHandler(), player);
return Pair.create((DrmSessionManager) streamingDrmSessionManager,
getWidevineSecurityLevel(streamingDrmSessionManager) == SECURITY_LEVEL_1);
}

View File

@ -68,10 +68,10 @@ import android.widget.TextView;
}
@Override
protected void doSomeWork(long timeUs) throws ExoPlaybackException {
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
maybeFail();
if (timeUs < currentPositionUs || timeUs > currentPositionUs + 1000000) {
currentPositionUs = timeUs;
if (positionUs < currentPositionUs || positionUs > currentPositionUs + 1000000) {
currentPositionUs = positionUs;
textView.post(this);
}
}

View File

@ -48,8 +48,8 @@ public class DefaultRendererBuilder implements RendererBuilder {
// Build the video and audio renderers.
FrameworkSampleSource sampleSource = new FrameworkSampleSource(context, uri, null, 2);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
player.getMainHandler(), player, 50);
null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, player.getMainHandler(),
player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
null, true, player.getMainHandler(), player);

View File

@ -19,10 +19,10 @@ import com.google.android.exoplayer.DummyTrackRenderer;
import com.google.android.exoplayer.ExoPlaybackException;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer.AudioTrackInitializationException;
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.drm.StreamingDrmSessionManager;
@ -93,7 +93,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
public interface Listener {
void onStateChanged(boolean playWhenReady, int playbackState);
void onError(Exception e);
void onVideoSizeChanged(int width, int height);
void onVideoSizeChanged(int width, int height, float pixelWidthHeightRatio);
}
/**
@ -106,7 +106,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
*/
public interface InternalErrorListener {
void onRendererInitializationError(Exception e);
void onAudioTrackInitializationError(AudioTrackInitializationException e);
void onAudioTrackInitializationError(AudioTrack.InitializationException e);
void onDecoderInitializationError(DecoderInitializationException e);
void onCryptoError(CryptoException e);
void onUpstreamError(int sourceId, IOException e);
@ -131,7 +131,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
* A listener for receiving notifications of timed text.
*/
public interface TextListener {
public abstract void onText(String text);
void onText(String text);
}
// Constants pulled into this class for convenience.
@ -287,7 +287,6 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
this.trackNames = trackNames;
this.multiTrackSources = multiTrackSources;
rendererBuildingState = RENDERER_BUILDING_STATE_BUILT;
maybeReportPlayerState();
pushSurfaceAndVideoTrack(false);
pushTrackSelection(TYPE_AUDIO, true);
pushTrackSelection(TYPE_TEXT, true);
@ -310,7 +309,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
player.setPlayWhenReady(playWhenReady);
}
public void seekTo(int positionMs) {
public void seekTo(long positionMs) {
player.seekTo(positionMs);
}
@ -339,11 +338,11 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
return playerState;
}
public int getCurrentPosition() {
public long getCurrentPosition() {
return player.getCurrentPosition();
}
public int getDuration() {
public long getDuration() {
return player.getDuration();
}
@ -377,9 +376,9 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
@Override
public void onVideoSizeChanged(int width, int height) {
public void onVideoSizeChanged(int width, int height, float pixelWidthHeightRatio) {
for (Listener listener : listeners) {
listener.onVideoSizeChanged(width, height);
listener.onVideoSizeChanged(width, height, pixelWidthHeightRatio);
}
}
@ -425,7 +424,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
@Override
public void onAudioTrackInitializationError(AudioTrackInitializationException e) {
public void onAudioTrackInitializationError(AudioTrack.InitializationException e) {
if (internalErrorListener != null) {
internalErrorListener.onAudioTrackInitializationError(e);
}

View File

@ -35,14 +35,14 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestFetcher;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser;
import com.google.android.exoplayer.text.TextTrackRenderer;
import com.google.android.exoplayer.text.ttml.TtmlParser;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer.upstream.HttpDataSource;
import com.google.android.exoplayer.util.ManifestFetcher.ManifestCallback;
import com.google.android.exoplayer.upstream.UriDataSource;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
@ -51,6 +51,7 @@ import android.media.UnsupportedSchemeException;
import android.os.Handler;
import android.widget.TextView;
import java.io.IOException;
import java.util.ArrayList;
import java.util.UUID;
@ -58,12 +59,13 @@ import java.util.UUID;
* A {@link RendererBuilder} for SmoothStreaming.
*/
public class SmoothStreamingRendererBuilder implements RendererBuilder,
ManifestCallback<SmoothStreamingManifest> {
ManifestFetcher.ManifestCallback<SmoothStreamingManifest> {
private static final int BUFFER_SEGMENT_SIZE = 64 * 1024;
private static final int VIDEO_BUFFER_SEGMENTS = 200;
private static final int AUDIO_BUFFER_SEGMENTS = 60;
private static final int TTML_BUFFER_SEGMENTS = 2;
private static final int TEXT_BUFFER_SEGMENTS = 2;
private static final int LIVE_EDGE_LATENCY_MS = 30000;
private final String userAgent;
private final String url;
@ -73,6 +75,7 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
private DemoPlayer player;
private RendererBuilderCallback callback;
private ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
public SmoothStreamingRendererBuilder(String userAgent, String url, String contentId,
MediaDrmCallback drmCallback, TextView debugTextView) {
@ -87,13 +90,15 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
this.player = player;
this.callback = callback;
SmoothStreamingManifestFetcher mpdFetcher = new SmoothStreamingManifestFetcher(this);
mpdFetcher.execute(url + "/Manifest", contentId);
SmoothStreamingManifestParser parser = new SmoothStreamingManifestParser();
manifestFetcher = new ManifestFetcher<SmoothStreamingManifest>(parser, contentId,
url + "/Manifest", userAgent);
manifestFetcher.singleLoad(player.getMainHandler().getLooper(), this);
}
@Override
public void onManifestError(String contentId, Exception e) {
callback.onRenderersError(e);
public void onManifestError(String contentId, IOException exception) {
callback.onRenderersError(exception);
}
@Override
@ -144,21 +149,18 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
}
}
}
int[] videoTrackIndices = new int[videoTrackIndexList.size()];
for (int i = 0; i < videoTrackIndexList.size(); i++) {
videoTrackIndices[i] = videoTrackIndexList.get(i);
}
int[] videoTrackIndices = Util.toArray(videoTrackIndexList);
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(url, manifest,
DataSource videoDataSource = new UriDataSource(userAgent, bandwidthMeter);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
videoStreamElementIndex, videoTrackIndices, videoDataSource,
new AdaptiveEvaluator(bandwidthMeter));
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_VIDEO);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
drmSessionManager, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null,
mainHandler, player, 50);
// Build the audio renderer.
@ -172,14 +174,15 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
} else {
audioTrackNames = new String[audioStreamElementCount];
ChunkSource[] audioChunkSources = new ChunkSource[audioStreamElementCount];
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
DataSource audioDataSource = new UriDataSource(userAgent, bandwidthMeter);
FormatEvaluator audioFormatEvaluator = new FormatEvaluator.FixedEvaluator();
audioStreamElementCount = 0;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
audioTrackNames[audioStreamElementCount] = manifest.streamElements[i].name;
audioChunkSources[audioStreamElementCount] = new SmoothStreamingChunkSource(url, manifest,
i, new int[] {0}, audioDataSource, audioFormatEvaluator);
audioChunkSources[audioStreamElementCount] = new SmoothStreamingChunkSource(
manifestFetcher, i, new int[] {0}, audioDataSource, audioFormatEvaluator,
LIVE_EDGE_LATENCY_MS);
audioStreamElementCount++;
}
}
@ -202,20 +205,20 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
} else {
textTrackNames = new String[textStreamElementCount];
ChunkSource[] textChunkSources = new ChunkSource[textStreamElementCount];
DataSource ttmlDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
DataSource ttmlDataSource = new UriDataSource(userAgent, bandwidthMeter);
FormatEvaluator ttmlFormatEvaluator = new FormatEvaluator.FixedEvaluator();
textStreamElementCount = 0;
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_TEXT) {
textTrackNames[textStreamElementCount] = manifest.streamElements[i].language;
textChunkSources[textStreamElementCount] = new SmoothStreamingChunkSource(url, manifest,
i, new int[] {0}, ttmlDataSource, ttmlFormatEvaluator);
textChunkSources[textStreamElementCount] = new SmoothStreamingChunkSource(manifestFetcher,
i, new int[] {0}, ttmlDataSource, ttmlFormatEvaluator, LIVE_EDGE_LATENCY_MS);
textStreamElementCount++;
}
}
textChunkSource = new MultiTrackChunkSource(textChunkSources);
ChunkSampleSource ttmlSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
TTML_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player,
DemoPlayer.TYPE_TEXT);
textRenderer = new TextTrackRenderer(ttmlSampleSource, new TtmlParser(), player,
mainHandler.getLooper());
@ -249,7 +252,7 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
public static DrmSessionManager getDrmSessionManager(UUID uuid, DemoPlayer player,
MediaDrmCallback drmCallback) throws UnsupportedSchemeException {
return new StreamingDrmSessionManager(uuid, player.getPlaybackLooper(), drmCallback,
return new StreamingDrmSessionManager(uuid, player.getPlaybackLooper(), drmCallback, null,
player.getMainHandler(), player);
}

View File

@ -29,7 +29,7 @@ import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionFetcher;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescriptionParser;
import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.dash.mpd.Representation;
import com.google.android.exoplayer.demo.simple.SimplePlayerActivity.RendererBuilder;
@ -37,24 +37,29 @@ import com.google.android.exoplayer.demo.simple.SimplePlayerActivity.RendererBui
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer.upstream.HttpDataSource;
import com.google.android.exoplayer.upstream.UriDataSource;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.ManifestFetcher.ManifestCallback;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import android.media.MediaCodec;
import android.os.AsyncTask;
import android.os.Handler;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* A {@link RendererBuilder} for DASH VOD.
* A {@link RendererBuilder} for DASH.
*/
/* package */ class DashVodRendererBuilder implements RendererBuilder,
/* package */ class DashRendererBuilder implements RendererBuilder,
ManifestCallback<MediaPresentationDescription> {
private static final int BUFFER_SEGMENT_SIZE = 64 * 1024;
private static final int VIDEO_BUFFER_SEGMENTS = 200;
private static final int AUDIO_BUFFER_SEGMENTS = 60;
private static final int LIVE_EDGE_LATENCY_MS = 30000;
private final SimplePlayerActivity playerActivity;
private final String userAgent;
@ -62,8 +67,9 @@ import java.util.ArrayList;
private final String contentId;
private RendererBuilderCallback callback;
private ManifestFetcher<MediaPresentationDescription> manifestFetcher;
public DashVodRendererBuilder(SimplePlayerActivity playerActivity, String userAgent, String url,
public DashRendererBuilder(SimplePlayerActivity playerActivity, String userAgent, String url,
String contentId) {
this.playerActivity = playerActivity;
this.userAgent = userAgent;
@ -74,59 +80,63 @@ import java.util.ArrayList;
@Override
public void buildRenderers(RendererBuilderCallback callback) {
this.callback = callback;
MediaPresentationDescriptionFetcher mpdFetcher = new MediaPresentationDescriptionFetcher(this);
mpdFetcher.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, url, contentId);
MediaPresentationDescriptionParser parser = new MediaPresentationDescriptionParser();
manifestFetcher = new ManifestFetcher<MediaPresentationDescription>(parser, contentId, url,
userAgent);
manifestFetcher.singleLoad(playerActivity.getMainLooper(), this);
}
@Override
public void onManifestError(String contentId, Exception e) {
public void onManifestError(String contentId, IOException e) {
callback.onRenderersError(e);
}
@Override
public void onManifest(String contentId, MediaPresentationDescription manifest) {
Period period = manifest.periods.get(0);
Handler mainHandler = playerActivity.getMainHandler();
LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
// Obtain Representations for playback.
// Determine which video representations we should use for playback.
int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
Representation audioRepresentation = null;
ArrayList<Representation> videoRepresentationsList = new ArrayList<Representation>();
Period period = manifest.periods.get(0);
for (int i = 0; i < period.adaptationSets.size(); i++) {
AdaptationSet adaptationSet = period.adaptationSets.get(i);
int adaptationSetType = adaptationSet.type;
for (int j = 0; j < adaptationSet.representations.size(); j++) {
Representation representation = adaptationSet.representations.get(j);
if (audioRepresentation == null && adaptationSetType == AdaptationSet.TYPE_AUDIO) {
audioRepresentation = representation;
} else if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
Format format = representation.format;
if (format.width * format.height <= maxDecodableFrameSize) {
videoRepresentationsList.add(representation);
} else {
// The device isn't capable of playing this stream.
}
}
int videoAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_VIDEO);
List<Representation> videoRepresentations =
period.adaptationSets.get(videoAdaptationSetIndex).representations;
ArrayList<Integer> videoRepresentationIndexList = new ArrayList<Integer>();
for (int i = 0; i < videoRepresentations.size(); i++) {
Format format = videoRepresentations.get(i).format;
if (format.width * format.height > maxDecodableFrameSize) {
// Filtering stream that device cannot play
} else if (!format.mimeType.equals(MimeTypes.VIDEO_MP4)
&& !format.mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// Filtering unsupported mime type
} else {
videoRepresentationIndexList.add(i);
}
}
Representation[] videoRepresentations = new Representation[videoRepresentationsList.size()];
videoRepresentationsList.toArray(videoRepresentations);
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource = new DashChunkSource(videoDataSource,
new AdaptiveEvaluator(bandwidthMeter), videoRepresentations);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, mainHandler, playerActivity, 50);
final MediaCodecVideoTrackRenderer videoRenderer;
if (videoRepresentationIndexList.isEmpty()) {
videoRenderer = null;
} else {
int[] videoRepresentationIndices = Util.toArray(videoRepresentationIndexList);
DataSource videoDataSource = new UriDataSource(userAgent, bandwidthMeter);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, videoAdaptationSetIndex,
videoRepresentationIndices, videoDataSource, new AdaptiveEvaluator(bandwidthMeter),
LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, mainHandler, playerActivity, 50);
}
// Build the audio renderer.
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource audioChunkSource = new DashChunkSource(audioDataSource,
new FormatEvaluator.FixedEvaluator(), audioRepresentation);
int audioAdaptationSetIndex = period.getAdaptationSetIndex(AdaptationSet.TYPE_AUDIO);
DataSource audioDataSource = new UriDataSource(userAgent, bandwidthMeter);
ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, audioAdaptationSetIndex,
new int[] {0}, audioDataSource, new FormatEvaluator.FixedEvaluator(), LIVE_EDGE_LATENCY_MS);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(

View File

@ -61,10 +61,6 @@ public class SimplePlayerActivity extends Activity implements SurfaceHolder.Call
private static final String TAG = "PlayerActivity";
public static final int TYPE_DASH_VOD = 0;
public static final int TYPE_SS_VOD = 1;
public static final int TYPE_OTHER = 2;
private MediaController mediaController;
private Handler mainHandler;
private View shutterView;
@ -76,7 +72,7 @@ public class SimplePlayerActivity extends Activity implements SurfaceHolder.Call
private MediaCodecVideoTrackRenderer videoRenderer;
private boolean autoPlay = true;
private int playerPosition;
private long playerPosition;
private Uri contentUri;
private int contentType;
@ -90,7 +86,7 @@ public class SimplePlayerActivity extends Activity implements SurfaceHolder.Call
Intent intent = getIntent();
contentUri = intent.getData();
contentType = intent.getIntExtra(DemoUtil.CONTENT_TYPE_EXTRA, TYPE_OTHER);
contentType = intent.getIntExtra(DemoUtil.CONTENT_TYPE_EXTRA, DemoUtil.TYPE_OTHER);
contentId = intent.getStringExtra(DemoUtil.CONTENT_ID_EXTRA);
mainHandler = new Handler(getMainLooper());
@ -113,6 +109,8 @@ public class SimplePlayerActivity extends Activity implements SurfaceHolder.Call
shutterView = findViewById(R.id.shutter);
surfaceView = (VideoSurfaceView) findViewById(R.id.surface_view);
surfaceView.getHolder().addCallback(this);
DemoUtil.setDefaultCookieManager();
}
@Override
@ -163,11 +161,11 @@ public class SimplePlayerActivity extends Activity implements SurfaceHolder.Call
private RendererBuilder getRendererBuilder() {
String userAgent = DemoUtil.getUserAgent(this);
switch (contentType) {
case TYPE_SS_VOD:
case DemoUtil.TYPE_SS:
return new SmoothStreamingRendererBuilder(this, userAgent, contentUri.toString(),
contentId);
case TYPE_DASH_VOD:
return new DashVodRendererBuilder(this, userAgent, contentUri.toString(), contentId);
case DemoUtil.TYPE_DASH:
return new DashRendererBuilder(this, userAgent, contentUri.toString(), contentId);
default:
return new DefaultRendererBuilder(this, contentUri);
}
@ -231,8 +229,9 @@ public class SimplePlayerActivity extends Activity implements SurfaceHolder.Call
// MediaCodecVideoTrackRenderer.Listener
@Override
public void onVideoSizeChanged(int width, int height) {
surfaceView.setVideoWidthHeightRatio(height == 0 ? 1 : (float) width / height);
public void onVideoSizeChanged(int width, int height, float pixelWidthHeightRatio) {
surfaceView.setVideoWidthHeightRatio(
height == 0 ? 1 : (pixelWidthHeightRatio * width) / height);
}
@Override

View File

@ -31,16 +31,19 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingChunkSource;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.StreamElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestFetcher;
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifestParser;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer.upstream.HttpDataSource;
import com.google.android.exoplayer.upstream.UriDataSource;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.ManifestFetcher.ManifestCallback;
import com.google.android.exoplayer.util.Util;
import android.media.MediaCodec;
import android.os.Handler;
import java.io.IOException;
import java.util.ArrayList;
/**
@ -52,6 +55,7 @@ import java.util.ArrayList;
private static final int BUFFER_SEGMENT_SIZE = 64 * 1024;
private static final int VIDEO_BUFFER_SEGMENTS = 200;
private static final int AUDIO_BUFFER_SEGMENTS = 60;
private static final int LIVE_EDGE_LATENCY_MS = 30000;
private final SimplePlayerActivity playerActivity;
private final String userAgent;
@ -59,6 +63,7 @@ import java.util.ArrayList;
private final String contentId;
private RendererBuilderCallback callback;
private ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
public SmoothStreamingRendererBuilder(SimplePlayerActivity playerActivity, String userAgent,
String url, String contentId) {
@ -71,12 +76,14 @@ import java.util.ArrayList;
@Override
public void buildRenderers(RendererBuilderCallback callback) {
this.callback = callback;
SmoothStreamingManifestFetcher mpdFetcher = new SmoothStreamingManifestFetcher(this);
mpdFetcher.execute(url + "/Manifest", contentId);
SmoothStreamingManifestParser parser = new SmoothStreamingManifestParser();
manifestFetcher = new ManifestFetcher<SmoothStreamingManifest>(parser, contentId,
url + "/Manifest", userAgent);
manifestFetcher.singleLoad(playerActivity.getMainLooper(), this);
}
@Override
public void onManifestError(String contentId, Exception e) {
public void onManifestError(String contentId, IOException e) {
callback.onRenderersError(e);
}
@ -109,26 +116,23 @@ import java.util.ArrayList;
}
}
}
int[] videoTrackIndices = new int[videoTrackIndexList.size()];
for (int i = 0; i < videoTrackIndexList.size(); i++) {
videoTrackIndices[i] = videoTrackIndexList.get(i);
}
int[] videoTrackIndices = Util.toArray(videoTrackIndexList);
// Build the video renderer.
DataSource videoDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(url, manifest,
DataSource videoDataSource = new UriDataSource(userAgent, bandwidthMeter);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
videoStreamElementIndex, videoTrackIndices, videoDataSource,
new AdaptiveEvaluator(bandwidthMeter));
new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, mainHandler, playerActivity, 50);
// Build the audio renderer.
DataSource audioDataSource = new HttpDataSource(userAgent, null, bandwidthMeter);
ChunkSource audioChunkSource = new SmoothStreamingChunkSource(url, manifest,
DataSource audioDataSource = new UriDataSource(userAgent, bandwidthMeter);
ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
audioStreamElementIndex, new int[] {0}, audioDataSource,
new FormatEvaluator.FixedEvaluator());
new FormatEvaluator.FixedEvaluator(), LIVE_EDGE_LATENCY_MS);
SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(

View File

@ -8,6 +8,6 @@
# project structure.
# Project target.
target=android-19
target=android-21
android.library=false
android.library.reference.1=../../../library/src/main

View File

@ -24,15 +24,13 @@
android:layout_height="match_parent"
android:layout_gravity="center"/>
<TextView android:id="@+id/subtitles"
android:layout_width="match_parent"
<com.google.android.exoplayer.text.SubtitleView android:id="@+id/subtitles"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center|bottom"
android:paddingLeft="8dp"
android:paddingRight="8dp"
android:paddingBottom="32dp"
android:gravity="center"
android:textSize="20sp"
android:layout_gravity="bottom|center_horizontal"
android:layout_marginLeft="16dp"
android:layout_marginRight="16dp"
android:layout_marginBottom="32dp"
android:visibility="invisible"/>
<View android:id="@+id/shutter"

View File

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Copyright (C) 2014 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources xmlns:android="http://schemas.android.com/apk/res/android">
<!-- The minimum subtitle font size. -->
<dimen name="subtitle_minimum_font_size">13sp</dimen>
</resources>

Binary file not shown.

View File

@ -1,6 +1,6 @@
#Tue Jun 10 20:02:28 BST 2014
#Thu Nov 20 12:15:03 PST 2014
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=http\://services.gradle.org/distributions/gradle-1.12-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-2.2.1-rc-1-bin.zip

View File

@ -1,53 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>ExoPlayerLib</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.ApkBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
<filteredResources>
<filter>
<id>1363908161147</id>
<name></name>
<type>22</type>
<matcher>
<id>org.eclipse.ui.ide.multiFilter</id>
<arguments>1.0-name-matches-false-false-BUILD</arguments>
</matcher>
</filter>
<filter>
<id>1363908161148</id>
<name></name>
<type>10</type>
<matcher>
<id>org.eclipse.ui.ide.multiFilter</id>
<arguments>1.0-name-matches-true-false-build</arguments>
</matcher>
</filter>
</filteredResources>
</projectDescription>

View File

@ -11,20 +11,20 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
apply plugin: 'android-library'
apply plugin: 'com.android.library'
android {
compileSdkVersion 19
buildToolsVersion "19.1"
compileSdkVersion 21
buildToolsVersion "21.1.1"
defaultConfig {
minSdkVersion 9
targetSdkVersion 19
targetSdkVersion 21
}
buildTypes {
release {
runProguard false
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
}
}

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 342 KiB

View File

Before

Width:  |  Height:  |  Size: 15 KiB

After

Width:  |  Height:  |  Size: 15 KiB

View File

Before

Width:  |  Height:  |  Size: 17 KiB

After

Width:  |  Height:  |  Size: 17 KiB

View File

Before

Width:  |  Height:  |  Size: 24 KiB

After

Width:  |  Height:  |  Size: 24 KiB

View File

Before

Width:  |  Height:  |  Size: 15 KiB

After

Width:  |  Height:  |  Size: 15 KiB

View File

@ -30,24 +30,4 @@
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
<filteredResources>
<filter>
<id>1363908161147</id>
<name></name>
<type>22</type>
<matcher>
<id>org.eclipse.ui.ide.multiFilter</id>
<arguments>1.0-name-matches-false-false-BUILD</arguments>
</matcher>
</filter>
<filter>
<id>1363908161148</id>
<name></name>
<type>10</type>
<matcher>
<id>org.eclipse.ui.ide.multiFilter</id>
<arguments>1.0-name-matches-true-false-build</arguments>
</matcher>
</filter>
</filteredResources>
</projectDescription>

View File

@ -27,6 +27,6 @@
the library may be of use on older devices. However, please note that the core video playback
functionality provided by the library requires API level 16 or greater.
-->
<uses-sdk android:minSdkVersion="9" android:targetSdkVersion="19"/>
<uses-sdk android:minSdkVersion="9" android:targetSdkVersion="21"/>
</manifest>

View File

@ -0,0 +1,33 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import java.io.IOException;
/**
* Thrown when a live playback falls behind the available media window.
*/
public class BehindLiveWindowException extends IOException {
public BehindLiveWindowException() {
super();
}
public BehindLiveWindowException(String message) {
super(message);
}
}

View File

@ -20,11 +20,26 @@ package com.google.android.exoplayer;
*/
public final class C {
/**
* Represents an unknown microsecond time or duration.
*/
public static final long UNKNOWN_TIME_US = -1L;
/**
* The number of microseconds in one second.
*/
public static final long MICROS_PER_SECOND = 1000000L;
/**
* Represents an unbounded length of data.
*/
public static final int LENGTH_UNBOUNDED = -1;
/**
* The name of the UTF-8 charset.
*/
public static final String UTF8_NAME = "UTF-8";
private C() {}
}

View File

@ -29,7 +29,7 @@ public final class DecoderInfo {
public final String name;
/**
* Whether the decoder is adaptive.
* Whether the decoder supports seamless resolution switches.
*
* @see android.media.MediaCodecInfo.CodecCapabilities#isFeatureSupported(String)
* @see android.media.MediaCodecInfo.CodecCapabilities#FEATURE_AdaptivePlayback

View File

@ -166,9 +166,9 @@ public class DefaultLoadControl implements LoadControl {
// Update the loader state.
int loaderBufferState = getLoaderBufferState(playbackPositionUs, nextLoadPositionUs);
LoaderState loaderState = loaderStates.get(loader);
boolean loaderStateChanged = loaderState.bufferState != loaderBufferState ||
loaderState.nextLoadPositionUs != nextLoadPositionUs || loaderState.loading != loading ||
loaderState.failed != failed;
boolean loaderStateChanged = loaderState.bufferState != loaderBufferState
|| loaderState.nextLoadPositionUs != nextLoadPositionUs || loaderState.loading != loading
|| loaderState.failed != failed;
if (loaderStateChanged) {
loaderState.bufferState = loaderBufferState;
loaderState.nextLoadPositionUs = nextLoadPositionUs;
@ -214,17 +214,17 @@ public class DefaultLoadControl implements LoadControl {
private void updateControlState() {
boolean loading = false;
boolean failed = false;
boolean finished = true;
boolean haveNextLoadPosition = false;
int highestState = bufferPoolState;
for (int i = 0; i < loaders.size(); i++) {
LoaderState loaderState = loaderStates.get(loaders.get(i));
loading |= loaderState.loading;
failed |= loaderState.failed;
finished &= loaderState.nextLoadPositionUs == -1;
haveNextLoadPosition |= loaderState.nextLoadPositionUs != -1;
highestState = Math.max(highestState, loaderState.bufferState);
}
fillingBuffers = !loaders.isEmpty() && !finished && !failed
fillingBuffers = !loaders.isEmpty() && !failed && (loading || haveNextLoadPosition)
&& (highestState == BELOW_LOW_WATERMARK
|| (highestState == BETWEEN_WATERMARKS && fillingBuffers));
if (fillingBuffers && !streamingPrioritySet) {

View File

@ -40,12 +40,12 @@ public class DummyTrackRenderer extends TrackRenderer {
}
@Override
protected void seekTo(long timeUs) {
protected void seekTo(long positionUs) {
throw new IllegalStateException();
}
@Override
protected void doSomeWork(long timeUs) {
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) {
throw new IllegalStateException();
}

View File

@ -229,7 +229,7 @@ public interface ExoPlayer {
/**
* Represents an unknown time or duration.
*/
public static final int UNKNOWN_TIME = -1;
public static final long UNKNOWN_TIME = -1;
/**
* Gets the {@link Looper} associated with the playback thread.
@ -313,7 +313,7 @@ public interface ExoPlayer {
*
* @param positionMs The seek position.
*/
public void seekTo(int positionMs);
public void seekTo(long positionMs);
/**
* Stops playback. Use {@code setPlayWhenReady(false)} rather than this method if the intention
@ -363,14 +363,14 @@ public interface ExoPlayer {
* @return The duration of the track in milliseconds, or {@link ExoPlayer#UNKNOWN_TIME} if the
* duration is not known.
*/
public int getDuration();
public long getDuration();
/**
* Gets the current playback position in milliseconds.
*
* @return The current playback position in milliseconds.
*/
public int getCurrentPosition();
public long getCurrentPosition();
/**
* Gets an estimate of the absolute position in milliseconds up to which data is buffered.
@ -378,7 +378,7 @@ public interface ExoPlayer {
* @return An estimate of the absolute position in milliseconds up to which data is buffered,
* or {@link ExoPlayer#UNKNOWN_TIME} if no estimate is available.
*/
public int getBufferedPosition();
public long getBufferedPosition();
/**
* Gets an estimate of the percentage into the media up to which data is buffered.

View File

@ -130,7 +130,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
@Override
public void seekTo(int positionMs) {
public void seekTo(long positionMs) {
internalPlayer.seekTo(positionMs);
}
@ -156,26 +156,26 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
@Override
public int getDuration() {
public long getDuration() {
return internalPlayer.getDuration();
}
@Override
public int getCurrentPosition() {
public long getCurrentPosition() {
return internalPlayer.getCurrentPosition();
}
@Override
public int getBufferedPosition() {
public long getBufferedPosition() {
return internalPlayer.getBufferedPosition();
}
@Override
public int getBufferedPercentage() {
int bufferedPosition = getBufferedPosition();
int duration = getDuration();
long bufferedPosition = getBufferedPosition();
long duration = getDuration();
return bufferedPosition == ExoPlayer.UNKNOWN_TIME || duration == ExoPlayer.UNKNOWN_TIME ? 0
: (duration == 0 ? 100 : (bufferedPosition * 100) / duration);
: (int) (duration == 0 ? 100 : (bufferedPosition * 100) / duration);
}
// Not private so it can be called from an inner class without going through a thunk method.

View File

@ -17,9 +17,9 @@ package com.google.android.exoplayer;
import com.google.android.exoplayer.ExoPlayer.ExoPlayerComponent;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.PriorityHandlerThread;
import com.google.android.exoplayer.util.TraceUtil;
import android.annotation.SuppressLint;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
@ -77,12 +77,12 @@ import java.util.List;
private int state;
private int customMessagesSent = 0;
private int customMessagesProcessed = 0;
private long elapsedRealtimeUs;
private volatile long durationUs;
private volatile long positionUs;
private volatile long bufferedPositionUs;
@SuppressLint("HandlerLeak")
public ExoPlayerImplInternal(Handler eventHandler, boolean playWhenReady,
boolean[] rendererEnabledFlags, int minBufferMs, int minRebufferMs) {
this.eventHandler = eventHandler;
@ -100,15 +100,10 @@ import java.util.List;
mediaClock = new MediaClock();
enabledRenderers = new ArrayList<TrackRenderer>(rendererEnabledFlags.length);
internalPlaybackThread = new HandlerThread(getClass().getSimpleName() + ":Handler") {
@Override
public void run() {
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.
Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
super.run();
}
};
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.
internalPlaybackThread = new PriorityHandlerThread(getClass().getSimpleName() + ":Handler",
Process.THREAD_PRIORITY_AUDIO);
internalPlaybackThread.start();
handler = new Handler(internalPlaybackThread.getLooper(), this);
}
@ -117,18 +112,18 @@ import java.util.List;
return internalPlaybackThread.getLooper();
}
public int getCurrentPosition() {
return (int) (positionUs / 1000);
public long getCurrentPosition() {
return positionUs / 1000;
}
public int getBufferedPosition() {
public long getBufferedPosition() {
return bufferedPositionUs == TrackRenderer.UNKNOWN_TIME_US ? ExoPlayer.UNKNOWN_TIME
: (int) (bufferedPositionUs / 1000);
: bufferedPositionUs / 1000;
}
public int getDuration() {
public long getDuration() {
return durationUs == TrackRenderer.UNKNOWN_TIME_US ? ExoPlayer.UNKNOWN_TIME
: (int) (durationUs / 1000);
: durationUs / 1000;
}
public void prepare(TrackRenderer... renderers) {
@ -139,8 +134,8 @@ import java.util.List;
handler.obtainMessage(MSG_SET_PLAY_WHEN_READY, playWhenReady ? 1 : 0, 0).sendToTarget();
}
public void seekTo(int positionMs) {
handler.obtainMessage(MSG_SEEK_TO, positionMs, 0).sendToTarget();
public void seekTo(long positionMs) {
handler.obtainMessage(MSG_SEEK_TO, positionMs).sendToTarget();
}
public void stop() {
@ -158,6 +153,10 @@ import java.util.List;
public synchronized void blockingSendMessage(ExoPlayerComponent target, int messageType,
Object message) {
if (released) {
Log.w(TAG, "Sent message(" + messageType + ") after release. Message ignored.");
return;
}
int messageNumber = customMessagesSent++;
handler.obtainMessage(MSG_CUSTOM, messageType, 0, Pair.create(target, message)).sendToTarget();
while (customMessagesProcessed <= messageNumber) {
@ -170,17 +169,18 @@ import java.util.List;
}
public synchronized void release() {
if (!released) {
handler.sendEmptyMessage(MSG_RELEASE);
while (!released) {
try {
wait();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
internalPlaybackThread.quit();
if (released) {
return;
}
handler.sendEmptyMessage(MSG_RELEASE);
while (!released) {
try {
wait();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
internalPlaybackThread.quit();
}
@Override
@ -204,7 +204,7 @@ import java.util.List;
return true;
}
case MSG_SEEK_TO: {
seekToInternal(msg.arg1);
seekToInternal((Long) msg.obj);
return true;
}
case MSG_STOP: {
@ -378,7 +378,8 @@ import java.util.List;
positionUs = timeSourceTrackRenderer != null &&
enabledRenderers.contains(timeSourceTrackRenderer) ?
timeSourceTrackRenderer.getCurrentPositionUs() :
mediaClock.getTimeUs();
mediaClock.getPositionUs();
elapsedRealtimeUs = SystemClock.elapsedRealtime() * 1000;
}
private void doSomeWork() throws ExoPlaybackException {
@ -394,7 +395,7 @@ import java.util.List;
// TODO: Each renderer should return the maximum delay before which it wishes to be
// invoked again. The minimum of these values should then be used as the delay before the next
// invocation of this method.
renderer.doSomeWork(positionUs);
renderer.doSomeWork(positionUs, elapsedRealtimeUs);
isEnded = isEnded && renderer.isEnded();
allRenderersReadyOrEnded = allRenderersReadyOrEnded && rendererReadyOrEnded(renderer);
@ -453,11 +454,11 @@ import java.util.List;
}
}
private void seekToInternal(int positionMs) throws ExoPlaybackException {
private void seekToInternal(long positionMs) throws ExoPlaybackException {
rebuffering = false;
positionUs = positionMs * 1000L;
mediaClock.stop();
mediaClock.setTimeUs(positionUs);
mediaClock.setPositionUs(positionUs);
if (state == ExoPlayer.STATE_IDLE || state == ExoPlayer.STATE_PREPARING) {
return;
}
@ -491,21 +492,9 @@ import java.util.List;
return;
}
for (int i = 0; i < renderers.length; i++) {
try {
TrackRenderer renderer = renderers[i];
ensureStopped(renderer);
if (renderer.getState() == TrackRenderer.STATE_ENABLED) {
renderer.disable();
}
renderer.release();
} catch (ExoPlaybackException e) {
// There's nothing we can do. Catch the exception here so that other renderers still have
// a chance of being cleaned up correctly.
Log.e(TAG, "Stop failed.", e);
} catch (RuntimeException e) {
// Ditto.
Log.e(TAG, "Stop failed.", e);
}
TrackRenderer renderer = renderers[i];
stopAndDisable(renderer);
release(renderer);
}
renderers = null;
timeSourceTrackRenderer = null;
@ -513,6 +502,33 @@ import java.util.List;
setState(ExoPlayer.STATE_IDLE);
}
private void stopAndDisable(TrackRenderer renderer) {
try {
ensureStopped(renderer);
if (renderer.getState() == TrackRenderer.STATE_ENABLED) {
renderer.disable();
}
} catch (ExoPlaybackException e) {
// There's nothing we can do.
Log.e(TAG, "Stop failed.", e);
} catch (RuntimeException e) {
// Ditto.
Log.e(TAG, "Stop failed.", e);
}
}
private void release(TrackRenderer renderer) {
try {
renderer.release();
} catch (ExoPlaybackException e) {
// There's nothing we can do.
Log.e(TAG, "Release failed.", e);
} catch (RuntimeException e) {
// Ditto.
Log.e(TAG, "Release failed.", e);
}
}
private <T> void sendMessageInternal(int what, Object obj)
throws ExoPlaybackException {
try {
@ -562,7 +578,7 @@ import java.util.List;
if (renderer == timeSourceTrackRenderer) {
// We've been using timeSourceTrackRenderer to advance the current position, but it's
// being disabled. Sync mediaClock so that it can take over timing responsibilities.
mediaClock.setTimeUs(renderer.getCurrentPositionUs());
mediaClock.setPositionUs(renderer.getCurrentPositionUs());
}
ensureStopped(renderer);
enabledRenderers.remove(renderer);

View File

@ -26,15 +26,15 @@ public class ExoPlayerLibraryInfo {
/**
* The version of the library, expressed as a string.
*/
public static final String VERSION = "1.0.13";
public static final String VERSION = "1.1.0";
/**
* The version of the library, expressed as an integer.
* <p>
* Three digits are used for each component of {@link #VERSION}. For example "1.2.3" has the
* corresponding integer version 1002003.
* corresponding integer version 001002003.
*/
public static final int VERSION_INT = 1000013;
public static final int VERSION_INT = 001001000;
/**
* Whether the library was compiled with {@link com.google.android.exoplayer.util.Assertions}

View File

@ -50,7 +50,7 @@ public final class FrameworkSampleSource implements SampleSource {
private int[] trackStates;
private boolean[] pendingDiscontinuities;
private long seekTimeUs;
private long seekPositionUs;
public FrameworkSampleSource(Context context, Uri uri, Map<String, String> headers,
int downstreamRendererCount) {
@ -71,10 +71,10 @@ public final class FrameworkSampleSource implements SampleSource {
trackInfos = new TrackInfo[trackStates.length];
for (int i = 0; i < trackStates.length; i++) {
android.media.MediaFormat format = extractor.getTrackFormat(i);
long duration = format.containsKey(android.media.MediaFormat.KEY_DURATION) ?
format.getLong(android.media.MediaFormat.KEY_DURATION) : TrackRenderer.UNKNOWN_TIME_US;
long durationUs = format.containsKey(android.media.MediaFormat.KEY_DURATION) ?
format.getLong(android.media.MediaFormat.KEY_DURATION) : C.UNKNOWN_TIME_US;
String mime = format.getString(android.media.MediaFormat.KEY_MIME);
trackInfos[i] = new TrackInfo(mime, duration);
trackInfos[i] = new TrackInfo(mime, durationUs);
}
prepared = true;
}
@ -94,16 +94,16 @@ public final class FrameworkSampleSource implements SampleSource {
}
@Override
public void enable(int track, long timeUs) {
public void enable(int track, long positionUs) {
Assertions.checkState(prepared);
Assertions.checkState(trackStates[track] == TRACK_STATE_DISABLED);
trackStates[track] = TRACK_STATE_ENABLED;
extractor.selectTrack(track);
seekToUs(timeUs);
seekToUs(positionUs);
}
@Override
public boolean continueBuffering(long playbackPositionUs) {
public boolean continueBuffering(long positionUs) {
// MediaExtractor takes care of buffering and blocks until it has samples, so we can always
// return true here. Although note that the blocking behavior is itself as bug, as per the
// TODO further up this file. This method will need to return something else as part of fixing
@ -112,7 +112,7 @@ public final class FrameworkSampleSource implements SampleSource {
}
@Override
public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder,
public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
SampleHolder sampleHolder, boolean onlyReadDiscontinuity) {
Assertions.checkState(prepared);
Assertions.checkState(trackStates[track] != TRACK_STATE_DISABLED);
@ -144,7 +144,7 @@ public final class FrameworkSampleSource implements SampleSource {
if ((sampleHolder.flags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) {
sampleHolder.cryptoInfo.setFromExtractorV16(extractor);
}
seekTimeUs = -1;
seekPositionUs = -1;
extractor.advance();
return SAMPLE_READ;
} else {
@ -168,13 +168,13 @@ public final class FrameworkSampleSource implements SampleSource {
}
@Override
public void seekToUs(long timeUs) {
public void seekToUs(long positionUs) {
Assertions.checkState(prepared);
if (seekTimeUs != timeUs) {
if (seekPositionUs != positionUs) {
// Avoid duplicate calls to the underlying extractor's seek method in the case that there
// have been no interleaving calls to advance.
seekTimeUs = timeUs;
extractor.seekTo(timeUs, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
seekPositionUs = positionUs;
extractor.seekTo(positionUs, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
for (int i = 0; i < trackStates.length; ++i) {
if (trackStates[i] != TRACK_STATE_DISABLED) {
pendingDiscontinuities[i] = true;

View File

@ -65,9 +65,10 @@ public interface LoadControl {
*
* @param loader The loader invoking the update.
* @param playbackPositionUs The loader's playback position.
* @param nextLoadPositionUs The loader's next load position, or -1 if finished.
* @param nextLoadPositionUs The loader's next load position. -1 if finished, failed, or if the
* next load position is not yet known.
* @param loading Whether the loader is currently loading data.
* @param failed Whether the loader has failed, meaning it does not wish to load more data.
* @param failed Whether the loader has failed.
* @return True if the loader is allowed to start its next load. False otherwise.
*/
boolean update(Object loader, long playbackPositionUs, long nextLoadPositionUs,

View File

@ -29,10 +29,10 @@ import android.os.SystemClock;
/**
* The media time when the clock was last set or stopped.
*/
private long timeUs;
private long positionUs;
/**
* The difference between {@link SystemClock#elapsedRealtime()} and {@link #timeUs}
* The difference between {@link SystemClock#elapsedRealtime()} and {@link #positionUs}
* when the clock was last set or started.
*/
private long deltaUs;
@ -43,7 +43,7 @@ import android.os.SystemClock;
public void start() {
if (!started) {
started = true;
deltaUs = elapsedRealtimeMinus(timeUs);
deltaUs = elapsedRealtimeMinus(positionUs);
}
}
@ -52,28 +52,28 @@ import android.os.SystemClock;
*/
public void stop() {
if (started) {
timeUs = elapsedRealtimeMinus(deltaUs);
positionUs = elapsedRealtimeMinus(deltaUs);
started = false;
}
}
/**
* @param timeUs The time to set in microseconds.
* @param timeUs The position to set in microseconds.
*/
public void setTimeUs(long timeUs) {
this.timeUs = timeUs;
public void setPositionUs(long timeUs) {
this.positionUs = timeUs;
deltaUs = elapsedRealtimeMinus(timeUs);
}
/**
* @return The current time in microseconds.
* @return The current position in microseconds.
*/
public long getTimeUs() {
return started ? elapsedRealtimeMinus(deltaUs) : timeUs;
public long getPositionUs() {
return started ? elapsedRealtimeMinus(deltaUs) : positionUs;
}
private long elapsedRealtimeMinus(long microSeconds) {
return SystemClock.elapsedRealtime() * 1000 - microSeconds;
private long elapsedRealtimeMinus(long toSubtractUs) {
return SystemClock.elapsedRealtime() * 1000 - toSubtractUs;
}
}

View File

@ -15,28 +15,21 @@
*/
package com.google.android.exoplayer;
import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.drm.DrmSessionManager;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.audiofx.Virtualizer;
import android.os.ConditionVariable;
import android.os.Handler;
import android.util.Log;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
/**
* Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}.
* Decodes and renders audio using {@link MediaCodec} and {@link android.media.AudioTrack}.
*/
@TargetApi(16)
public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
@ -52,26 +45,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
*
* @param e The corresponding exception.
*/
void onAudioTrackInitializationError(AudioTrackInitializationException e);
}
/**
* Thrown when a failure occurs instantiating an audio track.
*/
public static class AudioTrackInitializationException extends Exception {
/**
* The state as reported by {@link AudioTrack#getState()}
*/
public final int audioTrackState;
public AudioTrackInitializationException(int audioTrackState, int sampleRate,
int channelConfig, int bufferSize) {
super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", " +
channelConfig + ", " + bufferSize + ")");
this.audioTrackState = audioTrackState;
}
void onAudioTrackInitializationError(AudioTrack.InitializationException e);
}
@ -82,65 +56,12 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
*/
public static final int MSG_SET_VOLUME = 1;
/**
* The default multiplication factor used when determining the size of the underlying
* {@link AudioTrack}'s buffer.
*/
public static final float DEFAULT_MIN_BUFFER_MULTIPLICATION_FACTOR = 4;
private static final String TAG = "MediaCodecAudioTrackRenderer";
private static final long MICROS_PER_SECOND = 1000000L;
/**
* AudioTrack timestamps are deemed spurious if they are offset from the system clock by more
* than this amount. This is a fail safe that should not be required on correctly functioning
* devices.
*/
private static final long MAX_AUDIO_TIMSTAMP_OFFSET_US = 10 * MICROS_PER_SECOND;
private static final int MAX_PLAYHEAD_OFFSET_COUNT = 10;
private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000;
private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
private static final int START_NOT_SET = 0;
private static final int START_IN_SYNC = 1;
private static final int START_NEED_SYNC = 2;
private final EventListener eventListener;
private final ConditionVariable audioTrackReleasingConditionVariable;
private final AudioTimestampCompat audioTimestampCompat;
private final long[] playheadOffsets;
private final float minBufferMultiplicationFactor;
private int nextPlayheadOffsetIndex;
private int playheadOffsetCount;
private long smoothedPlayheadOffsetUs;
private long lastPlayheadSampleTimeUs;
private boolean audioTimestampSet;
private long lastTimestampSampleTimeUs;
private long lastRawPlaybackHeadPosition;
private long rawPlaybackHeadWrapCount;
private int sampleRate;
private int frameSize;
private int channelConfig;
private int minBufferSize;
private int bufferSize;
private AudioTrack audioTrack;
private Method audioTrackGetLatencyMethod;
private final AudioTrack audioTrack;
private int audioSessionId;
private long submittedBytes;
private int audioTrackStartMediaTimeState;
private long audioTrackStartMediaTimeUs;
private long audioTrackResumeSystemTimeUs;
private long lastReportedCurrentPositionUs;
private long audioTrackLatencyUs;
private float volume;
private byte[] temporaryBuffer;
private int temporaryBufferOffset;
private int temporaryBufferSize;
private long currentPositionUs;
/**
* @param source The upstream source from which the renderer obtains samples.
@ -190,15 +111,16 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
*/
public MediaCodecAudioTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener) {
this(source, drmSessionManager, playClearSamplesWithoutKeys,
DEFAULT_MIN_BUFFER_MULTIPLICATION_FACTOR, eventHandler, eventListener);
this(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener,
new AudioTrack());
}
/**
* @param source The upstream source from which the renderer obtains samples.
* @param minBufferMultiplicationFactor When instantiating an underlying {@link AudioTrack},
* the size of the track's is calculated as this value multiplied by the minimum buffer size
* obtained from {@link AudioTrack#getMinBufferSize(int, int, int)}. The multiplication
* @param minBufferMultiplicationFactor When instantiating an underlying
* {@link android.media.AudioTrack}, the size of the track is calculated as this value
* multiplied by the minimum buffer size obtained from
* {@link android.media.AudioTrack#getMinBufferSize(int, int, int)}. The multiplication
* factor must be greater than or equal to 1.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
@ -218,9 +140,10 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
* begin in parallel with key acquisision. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param minBufferMultiplicationFactor When instantiating an underlying {@link AudioTrack},
* the size of the track's is calculated as this value multiplied by the minimum buffer size
* obtained from {@link AudioTrack#getMinBufferSize(int, int, int)}. The multiplication
* @param minBufferMultiplicationFactor When instantiating an underlying
* {@link android.media.AudioTrack}, the size of the track is calculated as this value
* multiplied by the minimum buffer size obtained from
* {@link android.media.AudioTrack#getMinBufferSize(int, int, int)}. The multiplication
* factor must be greater than or equal to 1.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
@ -229,25 +152,31 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
public MediaCodecAudioTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, float minBufferMultiplicationFactor,
Handler eventHandler, EventListener eventListener) {
this(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener,
new AudioTrack(minBufferMultiplicationFactor));
}
/**
* @param source The upstream source from which the renderer obtains samples.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
* begin in parallel with key acquisision. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioTrack Used for playing back decoded audio samples.
*/
public MediaCodecAudioTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener,
AudioTrack audioTrack) {
super(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener);
Assertions.checkState(minBufferMultiplicationFactor >= 1);
this.minBufferMultiplicationFactor = minBufferMultiplicationFactor;
this.eventListener = eventListener;
audioTrackReleasingConditionVariable = new ConditionVariable(true);
if (Util.SDK_INT >= 19) {
audioTimestampCompat = new AudioTimestampCompatV19();
} else {
audioTimestampCompat = new NoopAudioTimestampCompat();
}
if (Util.SDK_INT >= 18) {
try {
audioTrackGetLatencyMethod = AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
} catch (NoSuchMethodException e) {
// There's no guarantee this method exists. Do nothing.
}
}
playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
volume = 1.0f;
this.audioTrack = Assertions.checkNotNull(audioTrack);
this.audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
}
@Override
@ -261,103 +190,14 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
}
@Override
protected void onEnabled(long timeUs, boolean joining) {
super.onEnabled(timeUs, joining);
lastReportedCurrentPositionUs = 0;
}
@Override
protected void doSomeWork(long timeUs) throws ExoPlaybackException {
super.doSomeWork(timeUs);
maybeSampleSyncParams();
protected void onEnabled(long positionUs, boolean joining) {
super.onEnabled(positionUs, joining);
currentPositionUs = Long.MIN_VALUE;
}
@Override
protected void onOutputFormatChanged(MediaFormat format) {
int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int channelConfig;
switch (channelCount) {
case 1:
channelConfig = AudioFormat.CHANNEL_OUT_MONO;
break;
case 2:
channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
break;
case 6:
channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
break;
default:
throw new IllegalArgumentException("Unsupported channel count: " + channelCount);
}
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
if (audioTrack != null && this.sampleRate == sampleRate
&& this.channelConfig == channelConfig) {
// We already have an existing audio track with the correct sample rate and channel config.
return;
}
releaseAudioTrack();
this.sampleRate = sampleRate;
this.channelConfig = channelConfig;
this.minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
AudioFormat.ENCODING_PCM_16BIT);
this.bufferSize = (int) (minBufferMultiplicationFactor * minBufferSize);
this.frameSize = 2 * channelCount; // 2 bytes per 16 bit sample * number of channels.
}
private void initAudioTrack() throws ExoPlaybackException {
// If we're asynchronously releasing a previous audio track then we block until it has been
// released. This guarantees that we cannot end up in a state where we have multiple audio
// track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
// the shared memory that's available for audio track buffers. This would in turn cause the
// initialization of the audio track to fail.
audioTrackReleasingConditionVariable.block();
if (audioSessionId == 0) {
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
checkAudioTrackInitialized();
audioSessionId = audioTrack.getAudioSessionId();
onAudioSessionId(audioSessionId);
} else {
// Re-attach to the same audio session.
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM, audioSessionId);
checkAudioTrackInitialized();
}
audioTrack.setStereoVolume(volume, volume);
if (getState() == TrackRenderer.STATE_STARTED) {
audioTrackResumeSystemTimeUs = System.nanoTime() / 1000;
audioTrack.play();
}
}
/**
* Checks that {@link #audioTrack} has been successfully initialized. If it has then calling this
* method is a no-op. If it hasn't then {@link #audioTrack} is released and set to null, and an
* exception is thrown.
*
* @throws ExoPlaybackException If {@link #audioTrack} has not been successfully initialized.
*/
private void checkAudioTrackInitialized() throws ExoPlaybackException {
int audioTrackState = audioTrack.getState();
if (audioTrackState == AudioTrack.STATE_INITIALIZED) {
return;
}
// The track is not successfully initialized. Release and null the track.
try {
audioTrack.release();
} catch (Exception e) {
// The track has already failed to initialize, so it wouldn't be that surprising if release
// were to fail too. Swallow the exception.
} finally {
audioTrack = null;
}
// Propagate the relevant exceptions.
AudioTrackInitializationException exception = new AudioTrackInitializationException(
audioTrackState, sampleRate, channelConfig, bufferSize);
notifyAudioTrackInitializationError(exception);
throw new ExoPlaybackException(exception);
audioTrack.reconfigure(format);
}
/**
@ -376,324 +216,121 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
// Do nothing.
}
/**
 * Releases the current {@link #audioTrack}, if there is one, and resets all of the bookkeeping
 * associated with it.
 * <p>
 * {@code AudioTrack.release()} can block, so the release itself is performed on a background
 * thread. {@link #audioTrackReleasingConditionVariable} is closed before the thread starts and
 * opened when the release completes, allowing the next initialization to wait for the old track
 * to be fully released before creating a new one.
 */
private void releaseAudioTrack() {
  if (audioTrack != null) {
    // Clear write-position and media-time bookkeeping for the outgoing track.
    submittedBytes = 0;
    temporaryBufferSize = 0;
    lastRawPlaybackHeadPosition = 0;
    rawPlaybackHeadWrapCount = 0;
    audioTrackStartMediaTimeUs = 0;
    audioTrackStartMediaTimeState = START_NOT_SET;
    resetSyncParams();
    // Pause the track before releasing it, if it's currently playing.
    int playState = audioTrack.getPlayState();
    if (playState == AudioTrack.PLAYSTATE_PLAYING) {
      audioTrack.pause();
    }
    // AudioTrack.release can take some time, so we call it on a background thread.
    final AudioTrack toRelease = audioTrack;
    // Null the field before releasing so the rest of the renderer sees "no track" immediately.
    audioTrack = null;
    audioTrackReleasingConditionVariable.close();
    new Thread() {
      @Override
      public void run() {
        try {
          toRelease.release();
        } finally {
          // Signal that the release has finished, unblocking any pending initialization.
          audioTrackReleasingConditionVariable.open();
        }
      }
    }.start();
  }
}
@Override
protected void onStarted() {
super.onStarted();
if (audioTrack != null) {
audioTrackResumeSystemTimeUs = System.nanoTime() / 1000;
audioTrack.play();
}
audioTrack.play();
}
@Override
protected void onStopped() {
audioTrack.pause();
super.onStopped();
if (audioTrack != null) {
resetSyncParams();
audioTrack.pause();
}
}
@Override
protected boolean isEnded() {
// We've exhausted the output stream, and the AudioTrack has either played all of the data
// submitted, or has been fed insufficient data to begin playback.
return super.isEnded() && (getPendingFrameCount() == 0 || submittedBytes < minBufferSize);
return super.isEnded() && (!audioTrack.hasPendingData()
|| !audioTrack.hasEnoughDataToBeginPlayback());
}
@Override
protected boolean isReady() {
return getPendingFrameCount() > 0
return audioTrack.hasPendingData()
|| (super.isReady() && getSourceState() == SOURCE_STATE_READY_READ_MAY_FAIL);
}
/**
* This method uses a variety of techniques to compute the current position:
*
* 1. Prior to playback having started, calls up to the super class to obtain the pending seek
* position.
* 2. During playback, uses AudioTimestamps obtained from AudioTrack.getTimestamp on supported
* devices.
* 3. Else, derives a smoothed position by sampling the AudioTrack's frame position.
*/
@Override
protected long getCurrentPositionUs() {
long systemClockUs = System.nanoTime() / 1000;
long currentPositionUs;
if (audioTrack == null || audioTrackStartMediaTimeState == START_NOT_SET) {
// The AudioTrack hasn't started.
currentPositionUs = super.getCurrentPositionUs();
} else if (audioTimestampSet) {
// How long ago in the past the audio timestamp is (negative if it's in the future)
long presentationDiff = systemClockUs - (audioTimestampCompat.getNanoTime() / 1000);
long framesDiff = durationUsToFrames(presentationDiff);
// The position of the frame that's currently being presented.
long currentFramePosition = audioTimestampCompat.getFramePosition() + framesDiff;
currentPositionUs = framesToDurationUs(currentFramePosition) + audioTrackStartMediaTimeUs;
long audioTrackCurrentPositionUs = audioTrack.getCurrentPositionUs(isEnded());
if (audioTrackCurrentPositionUs == AudioTrack.CURRENT_POSITION_NOT_SET) {
// Use the super class position before audio playback starts.
currentPositionUs = Math.max(currentPositionUs, super.getCurrentPositionUs());
} else {
if (playheadOffsetCount == 0) {
// The AudioTrack has started, but we don't have any samples to compute a smoothed position.
currentPositionUs = getPlayheadPositionUs() + audioTrackStartMediaTimeUs;
} else {
// getPlayheadPositionUs() only has a granularity of ~20ms, so we base the position off the
// system clock (and a smoothed offset between it and the playhead position) so as to
// prevent jitter in the reported positions.
currentPositionUs = systemClockUs + smoothedPlayheadOffsetUs + audioTrackStartMediaTimeUs;
}
if (!isEnded()) {
currentPositionUs -= audioTrackLatencyUs;
}
// Make sure we don't ever report time moving backwards.
currentPositionUs = Math.max(currentPositionUs, audioTrackCurrentPositionUs);
}
// Make sure we don't ever report time moving backwards as a result of smoothing or switching
// between the various code paths above.
currentPositionUs = Math.max(lastReportedCurrentPositionUs, currentPositionUs);
lastReportedCurrentPositionUs = currentPositionUs;
return currentPositionUs;
}
/**
 * Periodically samples synchronization parameters while the track is playing:
 * <ul>
 * <li>a smoothed offset between the system clock and the audio playhead position, used to derive
 * low-jitter playback positions;</li>
 * <li>an {@code AudioTimestamp} (where supported), with sanity checks against the system clock;
 * </li>
 * <li>the audio track latency, via the hidden {@code AudioTrack.getLatency} method (reflection),
 * where available.</li>
 * </ul>
 * Does nothing unless the track has started and has output at least one frame.
 */
private void maybeSampleSyncParams() {
  if (audioTrack == null || audioTrackStartMediaTimeState == START_NOT_SET
      || getState() != STATE_STARTED) {
    // The AudioTrack isn't playing.
    return;
  }
  long playheadPositionUs = getPlayheadPositionUs();
  if (playheadPositionUs == 0) {
    // The AudioTrack hasn't output anything yet.
    return;
  }
  long systemClockUs = System.nanoTime() / 1000;
  if (systemClockUs - lastPlayheadSampleTimeUs >= MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US) {
    // Take a new sample and update the smoothed offset between the system clock and the playhead.
    playheadOffsets[nextPlayheadOffsetIndex] = playheadPositionUs - systemClockUs;
    // The offsets are kept in a fixed-size circular buffer.
    nextPlayheadOffsetIndex = (nextPlayheadOffsetIndex + 1) % MAX_PLAYHEAD_OFFSET_COUNT;
    if (playheadOffsetCount < MAX_PLAYHEAD_OFFSET_COUNT) {
      playheadOffsetCount++;
    }
    lastPlayheadSampleTimeUs = systemClockUs;
    // Recompute the smoothed offset as the mean of the collected samples.
    smoothedPlayheadOffsetUs = 0;
    for (int i = 0; i < playheadOffsetCount; i++) {
      smoothedPlayheadOffsetUs += playheadOffsets[i] / playheadOffsetCount;
    }
  }
  if (systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
    audioTimestampSet = audioTimestampCompat.initTimestamp(audioTrack);
    if (audioTimestampSet) {
      // Perform sanity checks on the timestamp.
      long audioTimestampUs = audioTimestampCompat.getNanoTime() / 1000;
      if (audioTimestampUs < audioTrackResumeSystemTimeUs) {
        // The timestamp corresponds to a time before the track was most recently resumed.
        audioTimestampSet = false;
      } else if (Math.abs(audioTimestampUs - systemClockUs) > MAX_AUDIO_TIMSTAMP_OFFSET_US) {
        // The timestamp time base is probably wrong.
        audioTimestampSet = false;
        Log.w(TAG, "Spurious audio timestamp: " + audioTimestampCompat.getFramePosition() + ", "
            + audioTimestampUs + ", " + systemClockUs);
      }
    }
    if (audioTrackGetLatencyMethod != null) {
      try {
        // Compute the audio track latency, excluding the latency due to the buffer (leaving
        // latency due to the mixer and audio hardware driver).
        audioTrackLatencyUs =
            (Integer) audioTrackGetLatencyMethod.invoke(audioTrack, (Object[]) null) * 1000L -
            framesToDurationUs(bufferSize / frameSize);
        // Sanity check that the latency is non-negative.
        audioTrackLatencyUs = Math.max(audioTrackLatencyUs, 0);
      } catch (Exception e) {
        // The method existed, but doesn't work. Don't try again.
        audioTrackGetLatencyMethod = null;
      }
    }
    lastTimestampSampleTimeUs = systemClockUs;
  }
}
/**
 * Clears all state gathered by {@link #maybeSampleSyncParams()}, discarding both the smoothed
 * playhead-offset samples and any previously obtained audio timestamp.
 */
private void resetSyncParams() {
  // Discard the circular buffer of playhead offset samples.
  playheadOffsetCount = 0;
  nextPlayheadOffsetIndex = 0;
  smoothedPlayheadOffsetUs = 0;
  lastPlayheadSampleTimeUs = 0;
  // Invalidate the audio timestamp so it's re-queried on the next sampling pass.
  audioTimestampSet = false;
  lastTimestampSampleTimeUs = 0;
}
/**
 * Returns the playback head position of {@link #audioTrack} converted to microseconds, based on
 * the current {@code sampleRate}.
 */
private long getPlayheadPositionUs() {
  return framesToDurationUs(getPlaybackHeadPosition());
}
/**
 * Converts a number of audio frames to the corresponding duration in microseconds, at the
 * current {@code sampleRate}.
 *
 * @param frameCount The number of frames.
 * @return The equivalent duration in microseconds.
 */
private long framesToDurationUs(long frameCount) {
  long scaledFrameCount = frameCount * MICROS_PER_SECOND;
  return scaledFrameCount / sampleRate;
}
/**
 * Converts a duration in microseconds to the corresponding number of audio frames, at the
 * current {@code sampleRate}.
 *
 * @param durationUs The duration in microseconds.
 * @return The equivalent number of frames.
 */
private long durationUsToFrames(long durationUs) {
  long scaledDuration = durationUs * sampleRate;
  return scaledDuration / MICROS_PER_SECOND;
}
@Override
protected void onDisabled() {
super.onDisabled();
releaseAudioTrack();
audioSessionId = 0;
audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
try {
audioTrack.reset();
} finally {
super.onDisabled();
}
}
@Override
protected void seekTo(long timeUs) throws ExoPlaybackException {
super.seekTo(timeUs);
// TODO: Try and re-use the same AudioTrack instance once [redacted] is fixed.
releaseAudioTrack();
lastReportedCurrentPositionUs = 0;
protected void seekTo(long positionUs) throws ExoPlaybackException {
super.seekTo(positionUs);
// TODO: Try and re-use the same AudioTrack instance once [Internal: b/7941810] is fixed.
audioTrack.reset();
currentPositionUs = Long.MIN_VALUE;
}
@Override
protected boolean processOutputBuffer(long timeUs, MediaCodec codec, ByteBuffer buffer,
MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip)
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip)
throws ExoPlaybackException {
if (shouldSkip) {
codec.releaseOutputBuffer(bufferIndex, false);
codecCounters.skippedOutputBufferCount++;
if (audioTrackStartMediaTimeState == START_IN_SYNC) {
// Skipping the sample will push track time out of sync. We'll need to sync again.
audioTrackStartMediaTimeState = START_NEED_SYNC;
}
audioTrack.handleDiscontinuity();
return true;
}
if (temporaryBufferSize == 0) {
// This is the first time we've seen this {@code buffer}.
// Note: presentationTimeUs corresponds to the end of the sample, not the start.
long bufferStartTime = bufferInfo.presentationTimeUs -
framesToDurationUs(bufferInfo.size / frameSize);
if (audioTrackStartMediaTimeState == START_NOT_SET) {
audioTrackStartMediaTimeUs = Math.max(0, bufferStartTime);
audioTrackStartMediaTimeState = START_IN_SYNC;
} else {
// Sanity check that bufferStartTime is consistent with the expected value.
long expectedBufferStartTime = audioTrackStartMediaTimeUs +
framesToDurationUs(submittedBytes / frameSize);
if (audioTrackStartMediaTimeState == START_IN_SYNC
&& Math.abs(expectedBufferStartTime - bufferStartTime) > 200000) {
Log.e(TAG, "Discontinuity detected [expected " + expectedBufferStartTime + ", got " +
bufferStartTime + "]");
audioTrackStartMediaTimeState = START_NEED_SYNC;
}
if (audioTrackStartMediaTimeState == START_NEED_SYNC) {
// Adjust audioTrackStartMediaTimeUs to be consistent with the current buffer's start
// time and the number of bytes submitted. Also reset lastReportedCurrentPositionUs to
// allow time to jump backwards if it really wants to.
audioTrackStartMediaTimeUs += (bufferStartTime - expectedBufferStartTime);
lastReportedCurrentPositionUs = 0;
// Initialize and start the audio track now.
if (!audioTrack.isInitialized()) {
try {
if (audioSessionId != AudioTrack.SESSION_ID_NOT_SET) {
audioTrack.initialize(audioSessionId);
} else {
audioSessionId = audioTrack.initialize();
onAudioSessionId(audioSessionId);
}
} catch (AudioTrack.InitializationException e) {
notifyAudioTrackInitializationError(e);
throw new ExoPlaybackException(e);
}
// Copy {@code buffer} into {@code temporaryBuffer}.
// TODO: Bypass this copy step on versions of Android where [redacted] is implemented.
if (temporaryBuffer == null || temporaryBuffer.length < bufferInfo.size) {
temporaryBuffer = new byte[bufferInfo.size];
if (getState() == TrackRenderer.STATE_STARTED) {
audioTrack.play();
}
buffer.position(bufferInfo.offset);
buffer.get(temporaryBuffer, 0, bufferInfo.size);
temporaryBufferOffset = 0;
temporaryBufferSize = bufferInfo.size;
}
if (audioTrack == null) {
initAudioTrack();
int handleBufferResult = audioTrack.handleBuffer(
buffer, bufferInfo.offset, bufferInfo.size, bufferInfo.presentationTimeUs);
// If we are out of sync, allow currentPositionUs to jump backwards.
if ((handleBufferResult & AudioTrack.RESULT_POSITION_DISCONTINUITY) != 0) {
currentPositionUs = Long.MIN_VALUE;
}
// TODO: Don't bother doing this once [redacted] is fixed.
// Work out how many bytes we can write without the risk of blocking.
int bytesPending = (int) (submittedBytes - getPlaybackHeadPosition() * frameSize);
int bytesToWrite = bufferSize - bytesPending;
if (bytesToWrite > 0) {
bytesToWrite = Math.min(temporaryBufferSize, bytesToWrite);
audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
temporaryBufferOffset += bytesToWrite;
temporaryBufferSize -= bytesToWrite;
submittedBytes += bytesToWrite;
if (temporaryBufferSize == 0) {
codec.releaseOutputBuffer(bufferIndex, false);
codecCounters.renderedOutputBufferCount++;
return true;
}
// Release the buffer if it was consumed.
if ((handleBufferResult & AudioTrack.RESULT_BUFFER_CONSUMED) != 0) {
codec.releaseOutputBuffer(bufferIndex, false);
codecCounters.renderedOutputBufferCount++;
return true;
}
return false;
}
/**
 * Returns the playback head position of {@link #audioTrack} as a monotonically increasing long.
 * <p>
 * {@link AudioTrack#getPlaybackHeadPosition()} returns a value that should be interpreted as an
 * unsigned 32 bit integer and that wraps around periodically. This method tracks wrap-arounds
 * across calls and folds them into the returned value, which therefore only wraps if it exceeds
 * {@link Long#MAX_VALUE} (never, in practice).
 *
 * @return The playback head position expressed as a long.
 */
private long getPlaybackHeadPosition() {
  // Mask to reinterpret the signed int as an unsigned 32-bit value.
  long rawPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
  if (lastRawPlaybackHeadPosition > rawPosition) {
    // The raw value went backwards, so it must have wrapped around.
    rawPlaybackHeadWrapCount++;
  }
  lastRawPlaybackHeadPosition = rawPosition;
  return rawPosition + (rawPlaybackHeadWrapCount << 32);
}
/**
 * Returns the number of submitted frames that the audio track has not yet played, or 0 if there
 * is no audio track.
 */
private int getPendingFrameCount() {
  if (audioTrack == null) {
    return 0;
  }
  long submittedFrames = submittedBytes / frameSize;
  return (int) (submittedFrames - getPlaybackHeadPosition());
}
@Override
public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
if (messageType == MSG_SET_VOLUME) {
setVolume((Float) message);
audioTrack.setVolume((Float) message);
} else {
super.handleMessage(messageType, message);
}
}
/**
 * Records the requested playback volume and applies it to the current audio track, if any. The
 * stored value is also applied to any track created later.
 *
 * @param volume The volume to set, applied identically to both stereo channels.
 */
private void setVolume(float volume) {
  this.volume = volume;
  if (audioTrack == null) {
    // No track yet; the stored volume is applied on initialization.
    return;
  }
  audioTrack.setStereoVolume(volume, volume);
}
private void notifyAudioTrackInitializationError(final AudioTrackInitializationException e) {
private void notifyAudioTrackInitializationError(final AudioTrack.InitializationException e) {
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
@ -704,74 +341,4 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
}
}
/**
* Interface exposing the {@link AudioTimestamp} methods we need that were added in SDK 19.
*/
private interface AudioTimestampCompat {
  /**
   * Returns true if the audioTimestamp was retrieved from the audioTrack.
   */
  boolean initTimestamp(AudioTrack audioTrack);
  // Returns the timestamp's nanoTime. Only valid after initTimestamp() has returned true.
  long getNanoTime();
  // Returns the timestamp's framePosition. Only valid after initTimestamp() has returned true.
  long getFramePosition();
}
/**
* The AudioTimestampCompat implementation for SDK < 19 that does nothing or throws an exception.
*/
private static final class NoopAudioTimestampCompat implements AudioTimestampCompat {
  @Override
  public boolean initTimestamp(AudioTrack audioTrack) {
    // Audio timestamps are unavailable before SDK 19, so callers always get false here and must
    // fall back to playhead-position based timing.
    return false;
  }
  @Override
  public long getNanoTime() {
    // Should never be called if initTimestamp() returned false.
    throw new UnsupportedOperationException();
  }
  @Override
  public long getFramePosition() {
    // Should never be called if initTimestamp() returned false.
    throw new UnsupportedOperationException();
  }
}
/**
* The AudioTimestampCompat implementation for SDK >= 19 that simply calls through to the actual
* implementations added in SDK 19.
*/
@TargetApi(19)
private static final class AudioTimestampCompatV19 implements AudioTimestampCompat {
  // Reused across calls to avoid allocating a new AudioTimestamp per query.
  private final AudioTimestamp audioTimestamp;
  public AudioTimestampCompatV19() {
    audioTimestamp = new AudioTimestamp();
  }
  @Override
  public boolean initTimestamp(AudioTrack audioTrack) {
    // Populates audioTimestamp in place; returns whether the track provided a timestamp.
    return audioTrack.getTimestamp(audioTimestamp);
  }
  @Override
  public long getNanoTime() {
    return audioTimestamp.nanoTime;
  }
  @Override
  public long getFramePosition() {
    return audioTimestamp.framePosition;
  }
}
}

View File

@ -21,6 +21,7 @@ import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodec.CodecException;
import android.media.MediaCodec.CryptoException;
import android.media.MediaCrypto;
import android.media.MediaExtractor;
@ -29,7 +30,8 @@ import android.os.SystemClock;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@ -70,27 +72,42 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
*/
public final String decoderName;
/**
* An optional developer-readable diagnostic information string. May be null.
*/
public final String diagnosticInfo;
public DecoderInitializationException(String decoderName, MediaFormat mediaFormat,
Exception cause) {
Throwable cause) {
super("Decoder init failed: " + decoderName + ", " + mediaFormat, cause);
this.decoderName = decoderName;
this.diagnosticInfo = Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null;
}
@TargetApi(21)
private static String getDiagnosticInfoV21(Throwable cause) {
if (cause instanceof CodecException) {
return ((CodecException) cause).getDiagnosticInfo();
}
return null;
}
}
/**
* Value of {@link #sourceState} when the source is not ready.
* Value returned by {@link #getSourceState()} when the source is not ready.
*/
protected static final int SOURCE_STATE_NOT_READY = 0;
/**
* Value of {@link #sourceState} when the source is ready and we're able to read from it.
* Value returned by {@link #getSourceState()} when the source is ready and we're able to read
* from it.
*/
protected static final int SOURCE_STATE_READY = 1;
/**
* Value of {@link #sourceState} when the source is ready but we might not be able to read from
* it. We transition to this state when an attempt to read a sample fails despite the source
* reporting that samples are available. This can occur when the next sample to be provided by
* the source is for another renderer.
* Value returned by {@link #getSourceState()} when the source is ready but we might not be able
* to read from it. We transition to this state when an attempt to read a sample fails despite the
* source reporting that samples are available. This can occur when the next sample to be provided
* by the source is for another renderer.
*/
protected static final int SOURCE_STATE_READY_READ_MAY_FAIL = 2;
@ -125,7 +142,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
private final SampleSource source;
private final SampleHolder sampleHolder;
private final MediaFormatHolder formatHolder;
private final HashSet<Long> decodeOnlyPresentationTimestamps;
private final List<Long> decodeOnlyPresentationTimestamps;
private final MediaCodec.BufferInfo outputBufferInfo;
private final EventListener eventListener;
protected final Handler eventHandler;
@ -173,9 +190,9 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
this.eventHandler = eventHandler;
this.eventListener = eventListener;
codecCounters = new CodecCounters();
sampleHolder = new SampleHolder(false);
sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DISABLED);
formatHolder = new MediaFormatHolder();
decodeOnlyPresentationTimestamps = new HashSet<Long>();
decodeOnlyPresentationTimestamps = new ArrayList<Long>();
outputBufferInfo = new MediaCodec.BufferInfo();
}
@ -216,13 +233,13 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
@Override
protected void onEnabled(long timeUs, boolean joining) {
source.enable(trackIndex, timeUs);
protected void onEnabled(long positionUs, boolean joining) {
source.enable(trackIndex, positionUs);
sourceState = SOURCE_STATE_NOT_READY;
inputStreamEnded = false;
outputStreamEnded = false;
waitingForKeys = false;
currentPositionUs = timeUs;
currentPositionUs = positionUs;
}
/**
@ -234,6 +251,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
codec.configure(x, null, crypto, 0);
}
@SuppressWarnings("deprecation")
protected final void maybeInitCodec() throws ExoPlaybackException {
if (!shouldInitCodec()) {
return;
@ -263,11 +281,9 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
}
DecoderInfo selectedDecoderInfo = MediaCodecUtil.getDecoderInfo(mimeType);
DecoderInfo selectedDecoderInfo = MediaCodecUtil.getDecoderInfo(mimeType,
requiresSecureDecoder);
String selectedDecoderName = selectedDecoderInfo.name;
if (requiresSecureDecoder) {
selectedDecoderName = getSecureDecoderName(selectedDecoderName);
}
codecIsAdaptive = selectedDecoderInfo.adaptive;
try {
codec = MediaCodec.createByCodecName(selectedDecoderName);
@ -366,9 +382,9 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
@Override
protected void seekTo(long timeUs) throws ExoPlaybackException {
currentPositionUs = timeUs;
source.seekToUs(timeUs);
protected void seekTo(long positionUs) throws ExoPlaybackException {
currentPositionUs = positionUs;
source.seekToUs(positionUs);
sourceState = SOURCE_STATE_NOT_READY;
inputStreamEnded = false;
outputStreamEnded = false;
@ -386,22 +402,22 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
@Override
protected void doSomeWork(long timeUs) throws ExoPlaybackException {
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
try {
sourceState = source.continueBuffering(timeUs)
sourceState = source.continueBuffering(positionUs)
? (sourceState == SOURCE_STATE_NOT_READY ? SOURCE_STATE_READY : sourceState)
: SOURCE_STATE_NOT_READY;
checkForDiscontinuity();
if (format == null) {
readFormat();
} else if (codec == null && !shouldInitCodec() && getState() == TrackRenderer.STATE_STARTED) {
discardSamples(timeUs);
discardSamples(positionUs);
} else {
if (codec == null && shouldInitCodec()) {
maybeInitCodec();
}
if (codec != null) {
while (drainOutputBuffer(timeUs)) {}
while (drainOutputBuffer(positionUs, elapsedRealtimeUs)) {}
if (feedInputBuffer(true)) {
while (feedInputBuffer(false)) {}
}
@ -420,10 +436,10 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
}
private void discardSamples(long timeUs) throws IOException, ExoPlaybackException {
private void discardSamples(long positionUs) throws IOException, ExoPlaybackException {
sampleHolder.data = null;
int result = SampleSource.SAMPLE_READ;
while (result == SampleSource.SAMPLE_READ && currentPositionUs <= timeUs) {
while (result == SampleSource.SAMPLE_READ && currentPositionUs <= positionUs) {
result = source.readData(trackIndex, currentPositionUs, formatHolder, sampleHolder, false);
if (result == SampleSource.SAMPLE_READ) {
if (!sampleHolder.decodeOnly) {
@ -452,7 +468,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
waitingForFirstSyncFrame = true;
decodeOnlyPresentationTimestamps.clear();
// Workaround for framework bugs.
// See [redacted], [redacted], [redacted].
// See [Internal: b/8347958], [Internal: b/8578467], [Internal: b/8543366].
if (Util.SDK_INT >= 18) {
codec.flush();
} else {
@ -468,7 +484,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
/**
* @param firstFeed True if this is the first call to this method from the current invocation of
* {@link #doSomeWork(long)}. False otherwise.
* {@link #doSomeWork(long, long)}. False otherwise.
* @return True if it may be possible to feed more input data. False otherwise.
* @throws IOException If an error occurs reading data from the upstream source.
* @throws ExoPlaybackException If an error occurs feeding the input buffer.
@ -620,7 +636,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
* @param formatHolder Holds the new format.
* @throws ExoPlaybackException If an error occurs reinitializing the {@link MediaCodec}.
*/
private void onInputFormatChanged(MediaFormatHolder formatHolder) throws ExoPlaybackException {
protected void onInputFormatChanged(MediaFormatHolder formatHolder) throws ExoPlaybackException {
MediaFormat oldFormat = format;
format = formatHolder.format;
drmInitData = formatHolder.drmInitData;
@ -672,7 +688,7 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
@Override
protected boolean isReady() {
return format != null && !waitingForKeys
&& sourceState != SOURCE_STATE_NOT_READY || outputIndex >= 0 || isWithinHotswapPeriod();
&& (sourceState != SOURCE_STATE_NOT_READY || outputIndex >= 0 || isWithinHotswapPeriod());
}
/**
@ -693,7 +709,9 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
* @return True if it may be possible to drain more output data. False otherwise.
* @throws ExoPlaybackException If an error occurs draining the output buffer.
*/
private boolean drainOutputBuffer(long timeUs) throws ExoPlaybackException {
@SuppressWarnings("deprecation")
private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs)
throws ExoPlaybackException {
if (outputStreamEnded) {
return false;
}
@ -719,12 +737,11 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
return false;
}
boolean decodeOnly = decodeOnlyPresentationTimestamps.contains(
outputBufferInfo.presentationTimeUs);
if (processOutputBuffer(timeUs, codec, outputBuffers[outputIndex], outputBufferInfo,
outputIndex, decodeOnly)) {
if (decodeOnly) {
decodeOnlyPresentationTimestamps.remove(outputBufferInfo.presentationTimeUs);
int decodeOnlyIndex = getDecodeOnlyIndex(outputBufferInfo.presentationTimeUs);
if (processOutputBuffer(positionUs, elapsedRealtimeUs, codec, outputBuffers[outputIndex],
outputBufferInfo, outputIndex, decodeOnlyIndex != -1)) {
if (decodeOnlyIndex != -1) {
decodeOnlyPresentationTimestamps.remove(decodeOnlyIndex);
} else {
currentPositionUs = outputBufferInfo.presentationTimeUs;
}
@ -742,16 +759,9 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
* longer required. False otherwise.
* @throws ExoPlaybackException If an error occurs processing the output buffer.
*/
protected abstract boolean processOutputBuffer(long timeUs, MediaCodec codec, ByteBuffer buffer,
MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip)
throws ExoPlaybackException;
/**
* Returns the name of the secure variant of a given decoder.
*/
private static String getSecureDecoderName(String rawDecoderName) {
return rawDecoderName + ".secure";
}
protected abstract boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs,
MediaCodec codec, ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex,
boolean shouldSkip) throws ExoPlaybackException;
private void notifyDecoderInitializationError(final DecoderInitializationException e) {
if (eventHandler != null && eventListener != null) {
@ -775,4 +785,14 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
}
/**
 * Returns the index in {@code decodeOnlyPresentationTimestamps} of the given presentation
 * timestamp, or -1 if it is not present.
 * <p>
 * Compares via {@code longValue()} rather than {@code List.indexOf} to avoid boxing the query
 * timestamp on every call.
 *
 * @param presentationTimeUs The presentation timestamp to look up.
 * @return The index of the timestamp, or -1 if not found.
 */
private int getDecodeOnlyIndex(long presentationTimeUs) {
  int index = 0;
  final int count = decodeOnlyPresentationTimestamps.size();
  while (index < count) {
    if (decodeOnlyPresentationTimestamps.get(index).longValue() == presentationTimeUs) {
      return index;
    }
    index++;
  }
  return -1;
}
}

View File

@ -23,6 +23,8 @@ import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.CodecProfileLevel;
import android.media.MediaCodecList;
import android.text.TextUtils;
import android.util.Log;
import android.util.Pair;
import java.util.HashMap;
@ -33,57 +35,99 @@ import java.util.HashMap;
@TargetApi(16)
public class MediaCodecUtil {
private static final HashMap<String, Pair<MediaCodecInfo, CodecCapabilities>> codecs =
new HashMap<String, Pair<MediaCodecInfo, CodecCapabilities>>();
private static final String TAG = "MediaCodecUtil";
private static final HashMap<CodecKey, Pair<String, CodecCapabilities>> codecs =
new HashMap<CodecKey, Pair<String, CodecCapabilities>>();
/**
* Get information about the decoder that will be used for a given mime type. If no decoder
* exists for the mime type then null is returned.
* Get information about the decoder that will be used for a given mime type.
*
* @param mimeType The mime type.
* @param secure Whether the decoder is required to support secure decryption. Always pass false
* unless secure decryption really is required.
* @return Information about the decoder that will be used, or null if no decoder exists.
*/
public static DecoderInfo getDecoderInfo(String mimeType) {
Pair<MediaCodecInfo, CodecCapabilities> info = getMediaCodecInfo(mimeType);
public static DecoderInfo getDecoderInfo(String mimeType, boolean secure) {
Pair<String, CodecCapabilities> info = getMediaCodecInfo(mimeType, secure);
if (info == null) {
return null;
}
return new DecoderInfo(info.first.getName(), isAdaptive(info.second));
return new DecoderInfo(info.first, isAdaptive(info.second));
}
/**
* Optional call to warm the codec cache. Call from any appropriate
* place to hide latency.
* Optional call to warm the codec cache for a given mime type.
* <p>
* Calling this method may speed up subsequent calls to {@link #getDecoderInfo(String, boolean)}.
*
* @param mimeType The mime type.
* @param secure Whether the decoder is required to support secure decryption. Always pass false
* unless secure decryption really is required.
*/
public static synchronized void warmCodecs(String[] mimeTypes) {
for (int i = 0; i < mimeTypes.length; i++) {
getMediaCodecInfo(mimeTypes[i]);
}
public static synchronized void warmCodec(String mimeType, boolean secure) {
getMediaCodecInfo(mimeType, secure);
}
/**
* Returns the best decoder and its capabilities for the given mimeType. If there's no decoder
* returns null.
* Returns the name of the best decoder and its capabilities for the given mimeType.
*/
private static synchronized Pair<MediaCodecInfo, CodecCapabilities> getMediaCodecInfo(
String mimeType) {
Pair<MediaCodecInfo, CodecCapabilities> result = codecs.get(mimeType);
if (result != null) {
return result;
private static synchronized Pair<String, CodecCapabilities> getMediaCodecInfo(
String mimeType, boolean secure) {
CodecKey key = new CodecKey(mimeType, secure);
if (codecs.containsKey(key)) {
return codecs.get(key);
}
int numberOfCodecs = MediaCodecList.getCodecCount();
MediaCodecListCompat mediaCodecList = Util.SDK_INT >= 21
? new MediaCodecListCompatV21(secure) : new MediaCodecListCompatV16();
Pair<String, CodecCapabilities> codecInfo = getMediaCodecInfo(key, mediaCodecList);
// TODO: Verify this cannot occur on v22, and change >= to == [Internal: b/18678462].
if (secure && codecInfo == null && Util.SDK_INT >= 21) {
// Some devices don't list secure decoders on API level 21. Try the legacy path.
mediaCodecList = new MediaCodecListCompatV16();
codecInfo = getMediaCodecInfo(key, mediaCodecList);
if (codecInfo != null) {
Log.w(TAG, "MediaCodecList API didn't list secure decoder for: " + mimeType
+ ". Assuming: " + codecInfo.first);
}
}
return codecInfo;
}
private static Pair<String, CodecCapabilities> getMediaCodecInfo(CodecKey key,
MediaCodecListCompat mediaCodecList) {
String mimeType = key.mimeType;
int numberOfCodecs = mediaCodecList.getCodecCount();
boolean secureDecodersExplicit = mediaCodecList.secureDecodersExplicit();
// Note: MediaCodecList is sorted by the framework such that the best decoders come first.
for (int i = 0; i < numberOfCodecs; i++) {
MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
MediaCodecInfo info = mediaCodecList.getCodecInfoAt(i);
String codecName = info.getName();
if (!info.isEncoder() && isOmxCodec(codecName)) {
if (!info.isEncoder() && codecName.startsWith("OMX.")
&& (secureDecodersExplicit || !codecName.endsWith(".secure"))) {
String[] supportedTypes = info.getSupportedTypes();
for (int j = 0; j < supportedTypes.length; j++) {
String supportedType = supportedTypes[j];
if (supportedType.equalsIgnoreCase(mimeType)) {
result = Pair.create(info, info.getCapabilitiesForType(supportedType));
codecs.put(mimeType, result);
return result;
CodecCapabilities capabilities = info.getCapabilitiesForType(supportedType);
boolean secure = mediaCodecList.isSecurePlaybackSupported(key.mimeType, capabilities);
if (!secureDecodersExplicit) {
// Cache variants for both insecure and (if we think it's supported) secure playback.
codecs.put(key.secure ? new CodecKey(mimeType, false) : key,
Pair.create(codecName, capabilities));
if (secure) {
codecs.put(key.secure ? key : new CodecKey(mimeType, true),
Pair.create(codecName + ".secure", capabilities));
}
} else {
// Only cache this variant. If both insecure and secure decoders are available, they
// should both be listed separately.
codecs.put(key.secure == secure ? key : new CodecKey(mimeType, secure),
Pair.create(codecName, capabilities));
}
if (codecs.containsKey(key)) {
return codecs.get(key);
}
}
}
}
@ -91,10 +135,6 @@ public class MediaCodecUtil {
return null;
}
private static boolean isOmxCodec(String name) {
return name.startsWith("OMX.");
}
private static boolean isAdaptive(CodecCapabilities capabilities) {
if (Util.SDK_INT >= 19) {
return isAdaptiveV19(capabilities);
@ -114,7 +154,7 @@ public class MediaCodecUtil {
* @return Whether the specified profile is supported at the specified level.
*/
public static boolean isH264ProfileSupported(int profile, int level) {
Pair<MediaCodecInfo, CodecCapabilities> info = getMediaCodecInfo(MimeTypes.VIDEO_H264);
Pair<String, CodecCapabilities> info = getMediaCodecInfo(MimeTypes.VIDEO_H264, false);
if (info == null) {
return false;
}
@ -134,7 +174,7 @@ public class MediaCodecUtil {
* @return the maximum frame size for an H264 stream that can be decoded on the device.
*/
public static int maxH264DecodableFrameSize() {
Pair<MediaCodecInfo, CodecCapabilities> info = getMediaCodecInfo(MimeTypes.VIDEO_H264);
Pair<String, CodecCapabilities> info = getMediaCodecInfo(MimeTypes.VIDEO_H264, false);
if (info == null) {
return 0;
}
@ -178,4 +218,123 @@ public class MediaCodecUtil {
}
}
/**
 * Abstraction over the framework codec listing APIs, which changed at API level 21.
 */
private interface MediaCodecListCompat {

  /**
   * The number of codecs in the list.
   */
  public int getCodecCount();

  /**
   * The info at the specified index in the list.
   *
   * @param index The index.
   */
  public MediaCodecInfo getCodecInfoAt(int index);

  /**
   * Whether secure decoders are explicitly listed as their own entries, if present. True for the
   * API level 21+ implementation; false for the legacy implementation.
   */
  public boolean secureDecodersExplicit();

  /**
   * Whether secure playback is supported for the given {@link CodecCapabilities}, which should
   * have been obtained from a {@link MediaCodecInfo} obtained from this list.
   */
  public boolean isSecurePlaybackSupported(String mimeType, CodecCapabilities capabilities);

}
/**
 * A {@link MediaCodecListCompat} backed by the {@link MediaCodecList} instance API introduced at
 * API level 21, on which secure decoders are listed explicitly.
 */
@TargetApi(21)
private static final class MediaCodecListCompatV21 implements MediaCodecListCompat {

  private final MediaCodecInfo[] mediaCodecInfos;

  public MediaCodecListCompatV21(boolean includeSecure) {
    int codecKind;
    if (includeSecure) {
      codecKind = MediaCodecList.ALL_CODECS;
    } else {
      codecKind = MediaCodecList.REGULAR_CODECS;
    }
    mediaCodecInfos = new MediaCodecList(codecKind).getCodecInfos();
  }

  @Override
  public int getCodecCount() {
    return mediaCodecInfos.length;
  }

  @Override
  public MediaCodecInfo getCodecInfoAt(int index) {
    return mediaCodecInfos[index];
  }

  @Override
  public boolean secureDecodersExplicit() {
    // From API level 21 the framework lists secure decoders as distinct codec entries.
    return true;
  }

  @Override
  public boolean isSecurePlaybackSupported(String mimeType, CodecCapabilities capabilities) {
    return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_SecurePlayback);
  }

}
/**
 * A {@link MediaCodecListCompat} backed by the static {@link MediaCodecList} API that predates
 * API level 21, on which secure decoders are not listed explicitly.
 */
@SuppressWarnings("deprecation")
private static final class MediaCodecListCompatV16 implements MediaCodecListCompat {

  @Override
  public int getCodecCount() {
    return MediaCodecList.getCodecCount();
  }

  @Override
  public MediaCodecInfo getCodecInfoAt(int index) {
    return MediaCodecList.getCodecInfoAt(index);
  }

  @Override
  public boolean secureDecodersExplicit() {
    return false;
  }

  @Override
  public boolean isSecurePlaybackSupported(String mimeType, CodecCapabilities capabilities) {
    // Secure decoders weren't explicitly listed prior to API level 21. Optimistically assume
    // that a secure H264 decoder exists.
    boolean isH264 = MimeTypes.VIDEO_H264.equals(mimeType);
    return isH264;
  }

}
/**
 * A (mimeType, secure) pair used as a key into the codec cache.
 */
private static final class CodecKey {

  public final String mimeType;
  public final boolean secure;

  public CodecKey(String mimeType, boolean secure) {
    this.mimeType = mimeType;
    this.secure = secure;
  }

  @Override
  public int hashCode() {
    // Conventional 31-based hash over (mimeType, secure); produces the same values as the
    // expanded prime/result formulation.
    int hash = 31 + (mimeType == null ? 0 : mimeType.hashCode());
    return 31 * hash + (secure ? 1231 : 1237);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj == null || obj.getClass() != CodecKey.class) {
      return false;
    }
    CodecKey that = (CodecKey) obj;
    return secure == that.secure && TextUtils.equals(mimeType, that.mimeType);
  }

}
}

View File

@ -18,6 +18,7 @@ package com.google.android.exoplayer;
import com.google.android.exoplayer.drm.DrmSessionManager;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.TraceUtil;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.media.MediaCodec;
@ -58,8 +59,11 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
*
* @param width The video width in pixels.
* @param height The video height in pixels.
* @param pixelWidthHeightRatio The width to height ratio of each pixel. For the normal case
* of square pixels this will be equal to 1.0. Different values are indicative of anamorphic
* content.
*/
void onVideoSizeChanged(int width, int height);
void onVideoSizeChanged(int width, int height, float pixelWidthHeightRatio);
/**
* Invoked when a frame is rendered to a surface for the first time following that surface
@ -71,7 +75,35 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
}
// TODO: Use MediaFormat constants if these get exposed through the API. See [redacted].
/**
 * An interface for making fine-grained adjustments to frame release times.
 */
public interface FrameReleaseTimeHelper {

  /**
   * Enables the helper. Invoked when the owning renderer is enabled.
   */
  void enable();

  /**
   * Disables the helper. Invoked when the owning renderer is disabled.
   */
  void disable();

  /**
   * Called to make a fine-grained adjustment to a frame release time.
   *
   * @param framePresentationTimeUs The frame's media presentation time, in microseconds.
   * @param unadjustedReleaseTimeNs The frame's unadjusted release time, in nanoseconds and in
   *     the same time base as {@link System#nanoTime()}.
   * @return An adjusted release time for the frame, in nanoseconds and in the same time base as
   *     {@link System#nanoTime()}.
   */
  public long adjustReleaseTime(long framePresentationTimeUs, long unadjustedReleaseTimeNs);

}
// TODO: Use MediaFormat constants if these get exposed through the API. See [Internal: b/14127601].
private static final String KEY_CROP_LEFT = "crop-left";
private static final String KEY_CROP_RIGHT = "crop-right";
private static final String KEY_CROP_BOTTOM = "crop-bottom";
@ -84,13 +116,14 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
*/
public static final int MSG_SET_SURFACE = 1;
private final FrameReleaseTimeHelper frameReleaseTimeHelper;
private final EventListener eventListener;
private final long allowedJoiningTimeUs;
private final int videoScalingMode;
private final int maxDroppedFrameCountToNotify;
private Surface surface;
private boolean drawnToSurface;
private boolean reportedDrawnToSurface;
private boolean renderedFirstFrame;
private long joiningDeadlineUs;
private long droppedFrameAccumulationStartTimeMs;
@ -98,8 +131,10 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
private int currentWidth;
private int currentHeight;
private float currentPixelWidthHeightRatio;
private int lastReportedWidth;
private int lastReportedHeight;
private float lastReportedPixelWidthHeightRatio;
/**
* @param source The upstream source from which the renderer obtains samples.
@ -156,7 +191,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
public MediaCodecVideoTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, int videoScalingMode, long allowedJoiningTimeMs) {
this(source, drmSessionManager, playClearSamplesWithoutKeys, videoScalingMode,
allowedJoiningTimeMs, null, null, -1);
allowedJoiningTimeMs, null, null, null, -1);
}
/**
@ -174,8 +209,8 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
public MediaCodecVideoTrackRenderer(SampleSource source, int videoScalingMode,
long allowedJoiningTimeMs, Handler eventHandler, EventListener eventListener,
int maxDroppedFrameCountToNotify) {
this(source, null, true, videoScalingMode, allowedJoiningTimeMs, eventHandler, eventListener,
maxDroppedFrameCountToNotify);
this(source, null, true, videoScalingMode, allowedJoiningTimeMs, null, eventHandler,
eventListener, maxDroppedFrameCountToNotify);
}
/**
@ -191,6 +226,8 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
* {@link MediaCodec#setVideoScalingMode(int)}.
* @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer
* can attempt to seamlessly join an ongoing playback.
* @param frameReleaseTimeHelper An optional helper to make fine-grained adjustments to frame
* release times. May be null.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
@ -199,17 +236,21 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
*/
public MediaCodecVideoTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, int videoScalingMode, long allowedJoiningTimeMs,
Handler eventHandler, EventListener eventListener, int maxDroppedFrameCountToNotify) {
FrameReleaseTimeHelper frameReleaseTimeHelper, Handler eventHandler,
EventListener eventListener, int maxDroppedFrameCountToNotify) {
super(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener);
this.videoScalingMode = videoScalingMode;
this.allowedJoiningTimeUs = allowedJoiningTimeMs * 1000;
this.frameReleaseTimeHelper = frameReleaseTimeHelper;
this.eventListener = eventListener;
this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
joiningDeadlineUs = -1;
currentWidth = -1;
currentHeight = -1;
currentPixelWidthHeightRatio = -1;
lastReportedWidth = -1;
lastReportedHeight = -1;
lastReportedPixelWidthHeightRatio = -1;
}
@Override
@ -218,17 +259,20 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
}
@Override
protected void onEnabled(long startTimeUs, boolean joining) {
super.onEnabled(startTimeUs, joining);
protected void onEnabled(long positionUs, boolean joining) {
super.onEnabled(positionUs, joining);
renderedFirstFrame = false;
if (joining && allowedJoiningTimeUs > 0) {
joiningDeadlineUs = SystemClock.elapsedRealtime() * 1000L + allowedJoiningTimeUs;
}
if (frameReleaseTimeHelper != null) {
frameReleaseTimeHelper.enable();
}
}
@Override
protected void seekTo(long timeUs) throws ExoPlaybackException {
super.seekTo(timeUs);
protected void seekTo(long positionUs) throws ExoPlaybackException {
super.seekTo(positionUs);
renderedFirstFrame = false;
joiningDeadlineUs = -1;
}
@ -262,18 +306,23 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
@Override
protected void onStopped() {
super.onStopped();
joiningDeadlineUs = -1;
notifyAndResetDroppedFrameCount();
maybeNotifyDroppedFrameCount();
super.onStopped();
}
@Override
public void onDisabled() {
super.onDisabled();
currentWidth = -1;
currentHeight = -1;
currentPixelWidthHeightRatio = -1;
lastReportedWidth = -1;
lastReportedHeight = -1;
lastReportedPixelWidthHeightRatio = -1;
if (frameReleaseTimeHelper != null) {
frameReleaseTimeHelper.disable();
}
super.onDisabled();
}
@Override
@ -294,7 +343,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
return;
}
this.surface = surface;
this.drawnToSurface = false;
this.reportedDrawnToSurface = false;
int state = getState();
if (state == TrackRenderer.STATE_ENABLED || state == TrackRenderer.STATE_STARTED) {
releaseCodec();
@ -315,6 +364,15 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
codec.setVideoScalingMode(videoScalingMode);
}
@Override
protected void onInputFormatChanged(MediaFormatHolder holder) throws ExoPlaybackException {
  super.onInputFormatChanged(holder);
  // Capture the pixel aspect ratio from the input format, defaulting to square pixels (1.0)
  // when the format does not specify a value.
  // TODO: Ideally this would be read in onOutputFormatChanged, but there doesn't seem
  // to be a way to pass a custom key/value pair value through to the output format.
  currentPixelWidthHeightRatio = holder.format.pixelWidthHeightRatio == MediaFormat.NO_VALUE ? 1
      : holder.format.pixelWidthHeightRatio;
}
@Override
protected void onOutputFormatChanged(android.media.MediaFormat format) {
boolean hasCrop = format.containsKey(KEY_CROP_RIGHT) && format.containsKey(KEY_CROP_LEFT)
@ -330,22 +388,37 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
@Override
protected boolean canReconfigureCodec(MediaCodec codec, boolean codecIsAdaptive,
MediaFormat oldFormat, MediaFormat newFormat) {
// TODO: Relax this check to also allow non-H264 adaptive decoders.
return newFormat.mimeType.equals(MimeTypes.VIDEO_H264)
&& oldFormat.mimeType.equals(MimeTypes.VIDEO_H264)
&& codecIsAdaptive
|| (oldFormat.width == newFormat.width && oldFormat.height == newFormat.height);
return newFormat.mimeType.equals(oldFormat.mimeType)
&& (codecIsAdaptive
|| (oldFormat.width == newFormat.width && oldFormat.height == newFormat.height));
}
@Override
protected boolean processOutputBuffer(long timeUs, MediaCodec codec, ByteBuffer buffer,
MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip) {
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip) {
if (shouldSkip) {
skipOutputBuffer(codec, bufferIndex);
return true;
}
long earlyUs = bufferInfo.presentationTimeUs - timeUs;
// Compute how many microseconds it is until the buffer's presentation time.
long elapsedSinceStartOfLoopUs = (SystemClock.elapsedRealtime() * 1000) - elapsedRealtimeUs;
long earlyUs = bufferInfo.presentationTimeUs - positionUs - elapsedSinceStartOfLoopUs;
// Compute the buffer's desired release time in nanoseconds.
long systemTimeNs = System.nanoTime();
long unadjustedFrameReleaseTimeNs = systemTimeNs + (earlyUs * 1000);
// Apply a timestamp adjustment, if there is one.
long adjustedReleaseTimeNs;
if (frameReleaseTimeHelper != null) {
adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime(
bufferInfo.presentationTimeUs, unadjustedFrameReleaseTimeNs);
earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
} else {
adjustedReleaseTimeNs = unadjustedFrameReleaseTimeNs;
}
if (earlyUs < -30000) {
// We're more than 30ms late rendering the frame.
dropOutputBuffer(codec, bufferIndex);
@ -353,24 +426,37 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
}
if (!renderedFirstFrame) {
renderOutputBuffer(codec, bufferIndex);
renderOutputBufferImmediate(codec, bufferIndex);
renderedFirstFrame = true;
return true;
}
if (getState() == TrackRenderer.STATE_STARTED && earlyUs < 30000) {
if (earlyUs > 11000) {
// We're a little too early to render the frame. Sleep until the frame can be rendered.
// Note: The 11ms threshold was chosen fairly arbitrarily.
try {
// Subtracting 10000 rather than 11000 ensures that the sleep time will be at least 1ms.
Thread.sleep((earlyUs - 10000) / 1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (getState() != TrackRenderer.STATE_STARTED) {
return false;
}
if (Util.SDK_INT >= 21) {
// Let the underlying framework time the release.
if (earlyUs < 50000) {
renderOutputBufferTimedV21(codec, bufferIndex, adjustedReleaseTimeNs);
return true;
}
} else {
// We need to time the release ourselves.
if (earlyUs < 30000) {
if (earlyUs > 11000) {
// We're a little too early to render the frame. Sleep until the frame can be rendered.
// Note: The 11ms threshold was chosen fairly arbitrarily.
try {
// Subtracting 10000 rather than 11000 ensures the sleep time will be at least 1ms.
Thread.sleep((earlyUs - 10000) / 1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
renderOutputBufferImmediate(codec, bufferIndex);
return true;
}
renderOutputBuffer(codec, bufferIndex);
return true;
}
// We're either not playing, or it's not time to render the frame yet.
@ -391,62 +477,84 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
codecCounters.droppedOutputBufferCount++;
droppedFrameCount++;
if (droppedFrameCount == maxDroppedFrameCountToNotify) {
notifyAndResetDroppedFrameCount();
maybeNotifyDroppedFrameCount();
}
}
private void renderOutputBuffer(MediaCodec codec, int bufferIndex) {
if (lastReportedWidth != currentWidth || lastReportedHeight != currentHeight) {
lastReportedWidth = currentWidth;
lastReportedHeight = currentHeight;
notifyVideoSizeChanged(currentWidth, currentHeight);
}
TraceUtil.beginSection("renderVideoBuffer");
private void renderOutputBufferImmediate(MediaCodec codec, int bufferIndex) {
maybeNotifyVideoSizeChanged();
TraceUtil.beginSection("renderVideoBufferImmediate");
codec.releaseOutputBuffer(bufferIndex, true);
TraceUtil.endSection();
codecCounters.renderedOutputBufferCount++;
if (!drawnToSurface) {
drawnToSurface = true;
notifyDrawnToSurface(surface);
}
maybeNotifyDrawnToSurface();
}
private void notifyVideoSizeChanged(final int width, final int height) {
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onVideoSizeChanged(width, height);
}
});
}
/**
 * Renders the output buffer at the given index, asking the framework to time its release.
 * Only available from API level 21.
 *
 * @param codec The codec that owns the buffer.
 * @param bufferIndex The index of the buffer to render.
 * @param releaseTimeNs The desired release time, in nanoseconds and in the same time base as
 *     {@link System#nanoTime()}.
 */
@TargetApi(21)
private void renderOutputBufferTimedV21(MediaCodec codec, int bufferIndex, long releaseTimeNs) {
  maybeNotifyVideoSizeChanged();
  TraceUtil.beginSection("releaseOutputBufferTimed");
  codec.releaseOutputBuffer(bufferIndex, releaseTimeNs);
  TraceUtil.endSection();
  codecCounters.renderedOutputBufferCount++;
  maybeNotifyDrawnToSurface();
}
private void notifyDrawnToSurface(final Surface surface) {
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onDrawnToSurface(surface);
}
});
/**
 * Notifies the listener of the current video dimensions and pixel aspect ratio, if they have
 * not yet been reported or have changed since they were last reported.
 */
private void maybeNotifyVideoSizeChanged() {
  if (eventHandler == null || eventListener == null
      || (lastReportedWidth == currentWidth && lastReportedHeight == currentHeight
      && lastReportedPixelWidthHeightRatio == currentPixelWidthHeightRatio)) {
    return;
  }
  // Make final copies to ensure the runnable reports the correct values.
  final int currentWidth = this.currentWidth;
  final int currentHeight = this.currentHeight;
  final float currentPixelWidthHeightRatio = this.currentPixelWidthHeightRatio;
  eventHandler.post(new Runnable() {
    @Override
    public void run() {
      eventListener.onVideoSizeChanged(currentWidth, currentHeight, currentPixelWidthHeightRatio);
    }
  });
  // Update the last reported values.
  lastReportedWidth = currentWidth;
  lastReportedHeight = currentHeight;
  lastReportedPixelWidthHeightRatio = currentPixelWidthHeightRatio;
}
private void notifyAndResetDroppedFrameCount() {
if (eventHandler != null && eventListener != null && droppedFrameCount > 0) {
long now = SystemClock.elapsedRealtime();
final int countToNotify = droppedFrameCount;
final long elapsedToNotify = now - droppedFrameAccumulationStartTimeMs;
droppedFrameCount = 0;
droppedFrameAccumulationStartTimeMs = now;
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onDroppedFrames(countToNotify, elapsedToNotify);
}
});
/**
 * Notifies the listener that the current surface has been drawn to, unless this has already
 * been reported for the surface.
 */
private void maybeNotifyDrawnToSurface() {
  if (eventHandler == null || eventListener == null || reportedDrawnToSurface) {
    return;
  }
  // Make a final copy to ensure the runnable reports the correct surface.
  final Surface surface = this.surface;
  eventHandler.post(new Runnable() {
    @Override
    public void run() {
      eventListener.onDrawnToSurface(surface);
    }
  });
  // Record that we have reported that the surface has been drawn to.
  reportedDrawnToSurface = true;
}
/**
 * Notifies the listener of the number of frames dropped since the count was last reset, along
 * with the elapsed time over which they were dropped, then resets the tracking. Does nothing if
 * no frames have been dropped.
 */
private void maybeNotifyDroppedFrameCount() {
  if (eventHandler == null || eventListener == null || droppedFrameCount == 0) {
    return;
  }
  long now = SystemClock.elapsedRealtime();
  // Make final copies to ensure the runnable reports the correct values.
  final int countToNotify = droppedFrameCount;
  final long elapsedToNotify = now - droppedFrameAccumulationStartTimeMs;
  eventHandler.post(new Runnable() {
    @Override
    public void run() {
      eventListener.onDroppedFrames(countToNotify, elapsedToNotify);
    }
  });
  // Reset the dropped frame tracking.
  droppedFrameCount = 0;
  droppedFrameAccumulationStartTimeMs = now;
}
}

View File

@ -31,6 +31,9 @@ import java.util.List;
*/
public class MediaFormat {
private static final String KEY_PIXEL_WIDTH_HEIGHT_RATIO =
"com.google.android.videos.pixelWidthHeightRatio";
public static final int NO_VALUE = -1;
public final String mimeType;
@ -38,10 +41,13 @@ public class MediaFormat {
public final int width;
public final int height;
public final float pixelWidthHeightRatio;
public final int channelCount;
public final int sampleRate;
public final int bitrate;
private int maxWidth;
private int maxHeight;
@ -59,14 +65,25 @@ public class MediaFormat {
public static MediaFormat createVideoFormat(String mimeType, int maxInputSize, int width,
int height, List<byte[]> initializationData) {
return new MediaFormat(mimeType, maxInputSize, width, height, NO_VALUE, NO_VALUE,
initializationData);
return createVideoFormat(mimeType, maxInputSize, width, height, 1, initializationData);
}
public static MediaFormat createVideoFormat(String mimeType, int maxInputSize, int width,
int height, float pixelWidthHeightRatio, List<byte[]> initializationData) {
return new MediaFormat(mimeType, maxInputSize, width, height, pixelWidthHeightRatio, NO_VALUE,
NO_VALUE, NO_VALUE, initializationData);
}
public static MediaFormat createAudioFormat(String mimeType, int maxInputSize, int channelCount,
int sampleRate, List<byte[]> initializationData) {
return new MediaFormat(mimeType, maxInputSize, NO_VALUE, NO_VALUE, channelCount, sampleRate,
initializationData);
return new MediaFormat(mimeType, maxInputSize, NO_VALUE, NO_VALUE, NO_VALUE, channelCount,
sampleRate, NO_VALUE, initializationData);
}
public static MediaFormat createAudioFormat(String mimeType, int maxInputSize, int channelCount,
int sampleRate, int bitrate, List<byte[]> initializationData) {
return new MediaFormat(mimeType, maxInputSize, NO_VALUE, NO_VALUE, NO_VALUE, channelCount,
sampleRate, bitrate, initializationData);
}
@TargetApi(16)
@ -78,6 +95,8 @@ public class MediaFormat {
height = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_HEIGHT);
channelCount = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_CHANNEL_COUNT);
sampleRate = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_SAMPLE_RATE);
bitrate = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_BIT_RATE);
pixelWidthHeightRatio = getOptionalFloatV16(format, KEY_PIXEL_WIDTH_HEIGHT_RATIO);
initializationData = new ArrayList<byte[]>();
for (int i = 0; format.containsKey("csd-" + i); i++) {
ByteBuffer buffer = format.getByteBuffer("csd-" + i);
@ -90,14 +109,17 @@ public class MediaFormat {
maxHeight = NO_VALUE;
}
private MediaFormat(String mimeType, int maxInputSize, int width, int height, int channelCount,
int sampleRate, List<byte[]> initializationData) {
private MediaFormat(String mimeType, int maxInputSize, int width, int height,
float pixelWidthHeightRatio, int channelCount, int sampleRate, int bitrate,
List<byte[]> initializationData) {
this.mimeType = mimeType;
this.maxInputSize = maxInputSize;
this.width = width;
this.height = height;
this.pixelWidthHeightRatio = pixelWidthHeightRatio;
this.channelCount = channelCount;
this.sampleRate = sampleRate;
this.bitrate = bitrate;
this.initializationData = initializationData == null ? Collections.<byte[]>emptyList()
: initializationData;
maxWidth = NO_VALUE;
@ -128,10 +150,12 @@ public class MediaFormat {
result = 31 * result + maxInputSize;
result = 31 * result + width;
result = 31 * result + height;
result = 31 * result + Float.floatToRawIntBits(pixelWidthHeightRatio);
result = 31 * result + maxWidth;
result = 31 * result + maxHeight;
result = 31 * result + channelCount;
result = 31 * result + sampleRate;
result = 31 * result + bitrate;
for (int i = 0; i < initializationData.size(); i++) {
result = 31 * result + Arrays.hashCode(initializationData.get(i));
}
@ -163,9 +187,11 @@ public class MediaFormat {
private boolean equalsInternal(MediaFormat other, boolean ignoreMaxDimensions) {
if (maxInputSize != other.maxInputSize || width != other.width || height != other.height
|| pixelWidthHeightRatio != other.pixelWidthHeightRatio
|| (!ignoreMaxDimensions && (maxWidth != other.maxWidth || maxHeight != other.maxHeight))
|| channelCount != other.channelCount || sampleRate != other.sampleRate
|| !Util.areEqual(mimeType, other.mimeType)
|| bitrate != other.bitrate
|| initializationData.size() != other.initializationData.size()) {
return false;
}
@ -179,8 +205,9 @@ public class MediaFormat {
@Override
public String toString() {
return "MediaFormat(" + mimeType + ", " + maxInputSize + ", " + width + ", " + height + ", " +
channelCount + ", " + sampleRate + ", " + maxWidth + ", " + maxHeight + ")";
return "MediaFormat(" + mimeType + ", " + maxInputSize + ", " + width + ", " + height + ", "
+ pixelWidthHeightRatio + ", " + channelCount + ", " + sampleRate + ", " + bitrate + ", "
+ maxWidth + ", " + maxHeight + ")";
}
/**
@ -196,6 +223,8 @@ public class MediaFormat {
maybeSetIntegerV16(format, android.media.MediaFormat.KEY_HEIGHT, height);
maybeSetIntegerV16(format, android.media.MediaFormat.KEY_CHANNEL_COUNT, channelCount);
maybeSetIntegerV16(format, android.media.MediaFormat.KEY_SAMPLE_RATE, sampleRate);
maybeSetIntegerV16(format, android.media.MediaFormat.KEY_BIT_RATE, bitrate);
maybeSetFloatV16(format, KEY_PIXEL_WIDTH_HEIGHT_RATIO, pixelWidthHeightRatio);
for (int i = 0; i < initializationData.size(); i++) {
format.setByteBuffer("csd-" + i, ByteBuffer.wrap(initializationData.get(i)));
}
@ -221,9 +250,21 @@ public class MediaFormat {
}
@TargetApi(16)
private static final int getOptionalIntegerV16(android.media.MediaFormat format,
String key) {
/**
 * Sets a float value on the framework format, unless the value is {@link #NO_VALUE}.
 */
private static final void maybeSetFloatV16(android.media.MediaFormat format, String key,
    float value) {
  if (value == NO_VALUE) {
    // The value is unset; leave the key absent.
    return;
  }
  format.setFloat(key, value);
}
/**
 * Reads an integer value from the framework format, returning {@link #NO_VALUE} if the key is
 * absent.
 */
@TargetApi(16)
private static final int getOptionalIntegerV16(android.media.MediaFormat format, String key) {
  if (!format.containsKey(key)) {
    return NO_VALUE;
  }
  return format.getInteger(key);
}
/**
 * Reads a float value from the framework format, returning {@link #NO_VALUE} if the key is
 * absent.
 */
@TargetApi(16)
private static final float getOptionalFloatV16(android.media.MediaFormat format, String key) {
  if (!format.containsKey(key)) {
    return NO_VALUE;
  }
  return format.getFloat(key);
}
}

View File

@ -26,8 +26,12 @@ public class ParserException extends IOException {
super(message);
}
public ParserException(Exception cause) {
/**
 * @param cause The cause of the exception.
 */
public ParserException(Throwable cause) {
  super(cause);
}
/**
 * @param message The detail message for the exception.
 * @param cause The cause of the exception.
 */
public ParserException(String message, Throwable cause) {
  super(message, cause);
}
}

View File

@ -23,10 +23,19 @@ import java.nio.ByteBuffer;
public final class SampleHolder {
/**
* Whether a {@link SampleSource} is permitted to replace {@link #data} if its current value is
* null or of insufficient size to hold the sample.
* Disallows buffer replacement.
*/
public final boolean allowDataBufferReplacement;
public static final int BUFFER_REPLACEMENT_MODE_DISABLED = 0;
/**
* Allows buffer replacement using {@link ByteBuffer#allocate(int)}.
*/
public static final int BUFFER_REPLACEMENT_MODE_NORMAL = 1;
/**
* Allows buffer replacement using {@link ByteBuffer#allocateDirect(int)}.
*/
public static final int BUFFER_REPLACEMENT_MODE_DIRECT = 2;
public final CryptoInfo cryptoInfo;
@ -57,12 +66,34 @@ public final class SampleHolder {
*/
public boolean decodeOnly;
private final int bufferReplacementMode;
/**
* @param allowDataBufferReplacement See {@link #allowDataBufferReplacement}.
* @param bufferReplacementMode Determines the behavior of {@link #replaceBuffer(int)}. One of
* {@link #BUFFER_REPLACEMENT_MODE_DISABLED}, {@link #BUFFER_REPLACEMENT_MODE_NORMAL} and
* {@link #BUFFER_REPLACEMENT_MODE_DIRECT}.
*/
public SampleHolder(boolean allowDataBufferReplacement) {
public SampleHolder(int bufferReplacementMode) {
this.cryptoInfo = new CryptoInfo();
this.allowDataBufferReplacement = allowDataBufferReplacement;
this.bufferReplacementMode = bufferReplacementMode;
}
/**
* Attempts to replace {@link #data} with a {@link ByteBuffer} of the specified capacity.
*
* @param capacity The capacity of the replacement buffer, in bytes.
* @return True if the buffer was replaced. False otherwise.
*/
public boolean replaceBuffer(int capacity) {
switch (bufferReplacementMode) {
case BUFFER_REPLACEMENT_MODE_NORMAL:
data = ByteBuffer.allocate(capacity);
return true;
case BUFFER_REPLACEMENT_MODE_DIRECT:
data = ByteBuffer.allocateDirect(capacity);
return true;
}
return false;
}
}

View File

@ -85,9 +85,9 @@ public interface SampleSource {
* This method should not be called until after the source has been successfully prepared.
*
* @param track The track to enable.
* @param timeUs The player's current playback position.
* @param positionUs The player's current playback position.
*/
public void enable(int track, long timeUs);
public void enable(int track, long positionUs);
/**
* Disable the specified track.
@ -101,12 +101,12 @@ public interface SampleSource {
/**
* Indicates to the source that it should still be buffering data.
*
* @param playbackPositionUs The current playback position.
* @param positionUs The current playback position.
* @return True if the source has available samples, or if the end of the stream has been reached.
* False if more data needs to be buffered for samples to become available.
* @throws IOException If an error occurred reading from the source.
*/
public boolean continueBuffering(long playbackPositionUs) throws IOException;
public boolean continueBuffering(long positionUs) throws IOException;
/**
* Attempts to read either a sample, a new format, or a discontinuity from the source.
@ -118,7 +118,7 @@ public interface SampleSource {
* than the one for which data was requested.
*
* @param track The track from which to read.
* @param playbackPositionUs The current playback position.
* @param positionUs The current playback position.
* @param formatHolder A {@link MediaFormatHolder} object to populate in the case of a new format.
* @param sampleHolder A {@link SampleHolder} object to populate in the case of a new sample. If
* the caller requires the sample data then it must ensure that {@link SampleHolder#data}
@ -129,7 +129,7 @@ public interface SampleSource {
* {@link #DISCONTINUITY_READ}, {@link #NOTHING_READ} or {@link #END_OF_STREAM}.
* @throws IOException If an error occurred reading from the source.
*/
public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder,
public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
SampleHolder sampleHolder, boolean onlyReadDiscontinuity) throws IOException;
/**
@ -137,16 +137,16 @@ public interface SampleSource {
* <p>
* This method should not be called until after the source has been successfully prepared.
*
* @param timeUs The seek position in microseconds.
* @param positionUs The seek position in microseconds.
*/
public void seekToUs(long timeUs);
public void seekToUs(long positionUs);
/**
* Returns an estimate of the position up to which data is buffered.
* <p>
* This method should not be called until after the source has been successfully prepared.
*
* @return An estimate of the absolute position in micro-seconds up to which data is buffered,
* @return An estimate of the absolute position in microseconds up to which data is buffered,
* or {@link TrackRenderer#END_OF_TRACK_US} if data is buffered to the end of the stream, or
* {@link TrackRenderer#UNKNOWN_TIME_US} if no estimate is available.
*/

View File

@ -0,0 +1,180 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer.FrameReleaseTimeHelper;
import android.annotation.TargetApi;
import android.view.Choreographer;
import android.view.Choreographer.FrameCallback;
/**
 * Makes a best effort to adjust frame release timestamps for a smoother visual result.
 * <p>
 * Frame times are smoothed by averaging the intervals between observed frame timestamps since a
 * sync point and projecting release times forward from that sync point. When constructed to use
 * the primary display's vsync, release times are additionally snapped to just before the
 * targeted vsync.
 */
@TargetApi(16)
public class SmoothFrameReleaseTimeHelper implements FrameReleaseTimeHelper, FrameCallback {

  // Interval between successive vsync timestamp samples taken via the Choreographer.
  private static final long CHOREOGRAPHER_SAMPLE_DELAY_MILLIS = 500;
  // If the projected and observed release times drift apart by more than this, re-sync.
  private static final long MAX_ALLOWED_DRIFT_NS = 20000000;
  // Percentage of the vsync duration by which the release time precedes the targeted vsync.
  private static final long VSYNC_OFFSET_PERCENTAGE = 80;
  // Number of distinct frames required since the last sync before adjustments are applied.
  private static final int MIN_FRAMES_FOR_ADJUSTMENT = 6;

  private final boolean usePrimaryDisplayVsync;
  private final long vsyncDurationNs;
  private final long vsyncOffsetNs;

  // Non-null only between enable() and disable() when usePrimaryDisplayVsync is set.
  private Choreographer choreographer;
  // Most recently sampled vsync timestamp, or 0 if no sample has been taken yet.
  private long sampledVsyncTimeNs;

  private long lastUnadjustedFrameTimeUs;
  private long adjustedLastFrameTimeNs;
  private long pendingAdjustedFrameTimeNs;

  // Sync state: the frame/release times captured at the last sync, and the number of distinct
  // frames seen since then.
  private boolean haveSync;
  private long syncReleaseTimeNs;
  private long syncFrameTimeNs;
  private int frameCount;

  /**
   * @param primaryDisplayRefreshRate The refresh rate of the default display.
   * @param usePrimaryDisplayVsync Whether to snap to the primary display vsync. May not be
   *     suitable when rendering to secondary displays.
   */
  public SmoothFrameReleaseTimeHelper(
      float primaryDisplayRefreshRate, boolean usePrimaryDisplayVsync) {
    this.usePrimaryDisplayVsync = usePrimaryDisplayVsync;
    if (usePrimaryDisplayVsync) {
      vsyncDurationNs = (long) (1000000000d / primaryDisplayRefreshRate);
      vsyncOffsetNs = (vsyncDurationNs * VSYNC_OFFSET_PERCENTAGE) / 100;
    } else {
      // Vsync snapping disabled; these values are never read in that case.
      vsyncDurationNs = -1;
      vsyncOffsetNs = -1;
    }
  }

  @Override
  public void enable() {
    haveSync = false;
    if (usePrimaryDisplayVsync) {
      sampledVsyncTimeNs = 0;
      choreographer = Choreographer.getInstance();
      choreographer.postFrameCallback(this);
    }
  }

  @Override
  public void disable() {
    if (usePrimaryDisplayVsync) {
      choreographer.removeFrameCallback(this);
      choreographer = null;
    }
  }

  // Choreographer.FrameCallback implementation: records the vsync timestamp and schedules the
  // next sample.
  @Override
  public void doFrame(long vsyncTimeNs) {
    sampledVsyncTimeNs = vsyncTimeNs;
    choreographer.postFrameCallbackDelayed(this, CHOREOGRAPHER_SAMPLE_DELAY_MILLIS);
  }

  @Override
  public long adjustReleaseTime(long unadjustedFrameTimeUs, long unadjustedReleaseTimeNs) {
    long unadjustedFrameTimeNs = unadjustedFrameTimeUs * 1000;
    // Until we know better, the adjustment will be a no-op.
    long adjustedFrameTimeNs = unadjustedFrameTimeNs;
    long adjustedReleaseTimeNs = unadjustedReleaseTimeNs;
    if (haveSync) {
      // See if we've advanced to the next frame.
      if (unadjustedFrameTimeUs != lastUnadjustedFrameTimeUs) {
        frameCount++;
        adjustedLastFrameTimeNs = pendingAdjustedFrameTimeNs;
      }
      if (frameCount >= MIN_FRAMES_FOR_ADJUSTMENT) {
        // We're synced and have waited the required number of frames to apply an adjustment.
        // Calculate the average frame time across all the frames we've seen since the last sync.
        // This will typically give us a framerate at a finer granularity than the frame times
        // themselves (which often only have millisecond granularity).
        long averageFrameTimeNs = (unadjustedFrameTimeNs - syncFrameTimeNs) / frameCount;
        // Project the adjusted frame time forward using the average.
        long candidateAdjustedFrameTimeNs = adjustedLastFrameTimeNs + averageFrameTimeNs;
        if (isDriftTooLarge(candidateAdjustedFrameTimeNs, unadjustedReleaseTimeNs)) {
          haveSync = false;
        } else {
          adjustedFrameTimeNs = candidateAdjustedFrameTimeNs;
          adjustedReleaseTimeNs = syncReleaseTimeNs + adjustedFrameTimeNs - syncFrameTimeNs;
        }
      } else {
        // We're synced but haven't waited the required number of frames to apply an adjustment.
        // Check drift anyway.
        if (isDriftTooLarge(unadjustedFrameTimeNs, unadjustedReleaseTimeNs)) {
          haveSync = false;
        }
      }
    }
    // If we need to sync, do so now.
    if (!haveSync) {
      syncFrameTimeNs = unadjustedFrameTimeNs;
      syncReleaseTimeNs = unadjustedReleaseTimeNs;
      frameCount = 0;
      haveSync = true;
      onSynced();
    }
    lastUnadjustedFrameTimeUs = unadjustedFrameTimeUs;
    pendingAdjustedFrameTimeNs = adjustedFrameTimeNs;
    if (sampledVsyncTimeNs == 0) {
      // No vsync sample is available (snapping disabled, or no Choreographer callback yet).
      return adjustedReleaseTimeNs;
    }
    // Find the timestamp of the closest vsync. This is the vsync that we're targeting.
    long snappedTimeNs = closestVsync(adjustedReleaseTimeNs, sampledVsyncTimeNs, vsyncDurationNs);
    // Apply an offset so that we release before the target vsync, but after the previous one.
    return snappedTimeNs - vsyncOffsetNs;
  }

  /**
   * Called when a new sync point is established. The default implementation does nothing.
   */
  protected void onSynced() {
    // Do nothing.
  }

  // Returns whether the elapsed release-clock time has diverged from the elapsed frame-timestamp
  // time by more than MAX_ALLOWED_DRIFT_NS since the last sync.
  private boolean isDriftTooLarge(long frameTimeNs, long releaseTimeNs) {
    long elapsedFrameTimeNs = frameTimeNs - syncFrameTimeNs;
    long elapsedReleaseTimeNs = releaseTimeNs - syncReleaseTimeNs;
    return Math.abs(elapsedReleaseTimeNs - elapsedFrameTimeNs) > MAX_ALLOWED_DRIFT_NS;
  }

  // Returns the vsync timestamp closest to releaseTime, extrapolated from the sampled vsync
  // timestamp and the vsync duration.
  private static long closestVsync(long releaseTime, long sampledVsyncTime, long vsyncDuration) {
    long vsyncCount = (releaseTime - sampledVsyncTime) / vsyncDuration;
    long snappedTimeNs = sampledVsyncTime + (vsyncDuration * vsyncCount);
    // Determine the vsyncs immediately before and after the release time.
    long snappedBeforeNs;
    long snappedAfterNs;
    if (releaseTime <= snappedTimeNs) {
      snappedBeforeNs = snappedTimeNs - vsyncDuration;
      snappedAfterNs = snappedTimeNs;
    } else {
      snappedBeforeNs = snappedTimeNs;
      snappedAfterNs = snappedTimeNs + vsyncDuration;
    }
    long snappedAfterDiff = snappedAfterNs - releaseTime;
    long snappedBeforeDiff = releaseTime - snappedBeforeNs;
    return snappedAfterDiff < snappedBeforeDiff ? snappedAfterNs : snappedBeforeNs;
  }

}

View File

@ -20,9 +20,21 @@ package com.google.android.exoplayer;
*/
public final class TrackInfo {
/**
* The mime type.
*/
public final String mimeType;
/**
* The duration in microseconds, or {@link C#UNKNOWN_TIME_US} if the duration is unknown.
*/
public final long durationUs;
/**
* @param mimeType The mime type.
* @param durationUs The duration in microseconds, or {@link C#UNKNOWN_TIME_US} if the duration
* is unknown.
*/
public TrackInfo(String mimeType, long durationUs) {
this.mimeType = mimeType;
this.durationUs = durationUs;

View File

@ -18,6 +18,8 @@ package com.google.android.exoplayer;
import com.google.android.exoplayer.ExoPlayer.ExoPlayerComponent;
import com.google.android.exoplayer.util.Assertions;
import android.os.SystemClock;
/**
* Renders a single component of media.
*
@ -59,15 +61,15 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
*/
protected static final int STATE_ENABLED = 2;
/**
* The renderer is started. Calls to {@link #doSomeWork(long)} should cause the media to be
* The renderer is started. Calls to {@link #doSomeWork(long, long)} should cause the media to be
* rendered.
*/
protected static final int STATE_STARTED = 3;
/**
* Represents an unknown time or duration.
* Represents an unknown time or duration. Equal to {@link C#UNKNOWN_TIME_US}.
*/
public static final long UNKNOWN_TIME_US = -1;
public static final long UNKNOWN_TIME_US = C.UNKNOWN_TIME_US; // -1
/**
* Represents a time or duration that should match the duration of the longest track whose
* duration is known.
@ -83,9 +85,9 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
/**
* A time source renderer is a renderer that, when started, advances its own playback position.
* This means that {@link #getCurrentPositionUs()} will return increasing positions independently
* to increasing values being passed to {@link #doSomeWork(long)}. A player may have at most one
* time source renderer. If provided, the player will use such a renderer as its source of time
* during playback.
* to increasing values being passed to {@link #doSomeWork(long, long)}. A player may have at most
* one time source renderer. If provided, the player will use such a renderer as its source of
* time during playback.
* <p>
* This method may be called when the renderer is in any state.
*
@ -136,15 +138,15 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
/**
* Enable the renderer.
*
* @param timeUs The player's current position.
* @param positionUs The player's current position.
* @param joining Whether this renderer is being enabled to join an ongoing playback. If true
* then {@link #start} must be called immediately after this method returns (unless a
* {@link ExoPlaybackException} is thrown).
*/
/* package */ final void enable(long timeUs, boolean joining) throws ExoPlaybackException {
/* package */ final void enable(long positionUs, boolean joining) throws ExoPlaybackException {
Assertions.checkState(state == TrackRenderer.STATE_PREPARED);
state = TrackRenderer.STATE_ENABLED;
onEnabled(timeUs, joining);
onEnabled(positionUs, joining);
}
/**
@ -152,18 +154,18 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
* <p>
* The default implementation is a no-op.
*
* @param timeUs The player's current position.
* @param positionUs The player's current position.
* @param joining Whether this renderer is being enabled to join an ongoing playback. If true
* then {@link #onStarted} is guaranteed to be called immediately after this method returns
* (unless a {@link ExoPlaybackException} is thrown).
* @throws ExoPlaybackException If an error occurs.
*/
protected void onEnabled(long timeUs, boolean joining) throws ExoPlaybackException {
protected void onEnabled(long positionUs, boolean joining) throws ExoPlaybackException {
// Do nothing.
}
/**
* Starts the renderer, meaning that calls to {@link #doSomeWork(long)} will cause the
* Starts the renderer, meaning that calls to {@link #doSomeWork(long, long)} will cause the
* track to be rendered.
*/
/* package */ final void start() throws ExoPlaybackException {
@ -289,10 +291,14 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
* This method may be called when the renderer is in the following states:
* {@link #STATE_ENABLED}, {@link #STATE_STARTED}
*
* @param timeUs The current playback time.
* @param positionUs The current media time in microseconds, measured at the start of the
* current iteration of the rendering loop.
* @param elapsedRealtimeUs {@link SystemClock#elapsedRealtime()} in microseconds, measured at
* the start of the current iteration of the rendering loop.
* @throws ExoPlaybackException If an error occurs.
*/
protected abstract void doSomeWork(long timeUs) throws ExoPlaybackException;
protected abstract void doSomeWork(long positionUs, long elapsedRealtimeUs)
throws ExoPlaybackException;
/**
* Returns the duration of the media being rendered.
@ -300,7 +306,7 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
* This method may be called when the renderer is in the following states:
* {@link #STATE_PREPARED}, {@link #STATE_ENABLED}, {@link #STATE_STARTED}
*
* @return The duration of the track in micro-seconds, or {@link #MATCH_LONGEST_US} if
* @return The duration of the track in microseconds, or {@link #MATCH_LONGEST_US} if
* the track's duration should match that of the longest track whose duration is known, or
{@link #UNKNOWN_TIME_US} if the duration is not known.
*/
@ -312,17 +318,17 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
* This method may be called when the renderer is in the following states:
* {@link #STATE_ENABLED}, {@link #STATE_STARTED}
*
* @return The current playback position in micro-seconds.
* @return The current playback position in microseconds.
*/
protected abstract long getCurrentPositionUs();
/**
* Returns an estimate of the absolute position in micro-seconds up to which data is buffered.
* Returns an estimate of the absolute position in microseconds up to which data is buffered.
* <p>
* This method may be called when the renderer is in the following states:
* {@link #STATE_ENABLED}, {@link #STATE_STARTED}
*
* @return An estimate of the absolute position in micro-seconds up to which data is buffered,
* @return An estimate of the absolute position in microseconds up to which data is buffered,
* or {@link #END_OF_TRACK_US} if the track is fully buffered, or {@link #UNKNOWN_TIME_US} if
* no estimate is available.
*/
@ -334,10 +340,10 @@ public abstract class TrackRenderer implements ExoPlayerComponent {
* This method may be called when the renderer is in the following states:
* {@link #STATE_ENABLED}
*
* @param timeUs The desired time in micro-seconds.
* @param positionUs The desired playback position in microseconds.
* @throws ExoPlaybackException If an error occurs.
*/
protected abstract void seekTo(long timeUs) throws ExoPlaybackException;
protected abstract void seekTo(long positionUs) throws ExoPlaybackException;
@Override
public void handleMessage(int what, Object object) throws ExoPlaybackException {

View File

@ -0,0 +1,97 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.audio;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.media.AudioFormat;
import java.util.HashSet;
import java.util.Set;
/**
 * Represents the set of audio formats a device is capable of playing back.
 */
@TargetApi(21)
public final class AudioCapabilities {

  // Encodings the device can play, expressed as android.media.AudioFormat ENCODING_* constants.
  private final Set<Integer> supportedEncodings;
  // Maximum number of channels the device can play at the same time.
  private final int maxChannelCount;

  /**
   * Constructs new audio capabilities based on a set of supported encodings and a maximum channel
   * count.
   *
   * @param supportedEncodings Supported audio encodings from {@link android.media.AudioFormat}'s
   *     {@code ENCODING_*} constants. May be null, in which case no encodings are supported.
   * @param maxChannelCount The maximum number of audio channels that can be played simultaneously.
   */
  public AudioCapabilities(int[] supportedEncodings, int maxChannelCount) {
    this.maxChannelCount = maxChannelCount;
    this.supportedEncodings = new HashSet<Integer>();
    if (supportedEncodings != null) {
      for (int encoding : supportedEncodings) {
        this.supportedEncodings.add(encoding);
      }
    }
  }

  /** Returns whether the device supports playback of AC-3. */
  public boolean supportsAc3() {
    return Util.SDK_INT >= 21 && supportedEncodings.contains(AudioFormat.ENCODING_AC3);
  }

  /** Returns whether the device supports playback of enhanced AC-3. */
  public boolean supportsEAc3() {
    return Util.SDK_INT >= 21 && supportedEncodings.contains(AudioFormat.ENCODING_E_AC3);
  }

  /** Returns whether the device supports playback of 16-bit PCM. */
  public boolean supportsPcm() {
    return supportedEncodings.contains(AudioFormat.ENCODING_PCM_16BIT);
  }

  /** Returns the maximum number of channels the device can play at the same time. */
  public int getMaxChannelCount() {
    return maxChannelCount;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (!(other instanceof AudioCapabilities)) {
      return false;
    }
    AudioCapabilities that = (AudioCapabilities) other;
    return maxChannelCount == that.maxChannelCount
        && supportedEncodings.equals(that.supportedEncodings);
  }

  @Override
  public int hashCode() {
    // Same value as maxChannelCount + 31 * supportedEncodings.hashCode().
    return 31 * supportedEncodings.hashCode() + maxChannelCount;
  }

  @Override
  public String toString() {
    return "AudioCapabilities[maxChannelCount=" + maxChannelCount
        + ", supportedEncodings=" + supportedEncodings + "]";
  }

}

View File

@ -0,0 +1,101 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.audio;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioFormat;
import android.media.AudioManager;
/**
* Notifies a listener when the audio playback capabilities change. Call {@link #register} to start
* receiving notifications, and {@link #unregister} to stop.
*/
public final class AudioCapabilitiesReceiver {
/** Listener notified when audio capabilities change. */
public interface Listener {
/** Called when the audio capabilities change. */
void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities);
}
/** Default to stereo PCM on SDK < 21 and when HDMI is unplugged. */
private static final AudioCapabilities DEFAULT_AUDIO_CAPABILITIES =
new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, 2);
private final Context context;
private final Listener listener;
private final BroadcastReceiver receiver;
/**
* Constructs a new audio capabilities receiver.
*
* @param context Application context for registering to receive broadcasts.
* @param listener Listener to notify when audio capabilities change.
*/
public AudioCapabilitiesReceiver(Context context, Listener listener) {
this.context = Assertions.checkNotNull(context);
this.listener = Assertions.checkNotNull(listener);
this.receiver = Util.SDK_INT >= 21 ? new HdmiAudioPlugBroadcastReceiver() : null;
}
/**
* Registers to notify the listener when audio capabilities change. The listener will immediately
* receive the current audio capabilities. It is important to call {@link #unregister} so that
* the listener can be garbage collected.
*/
@TargetApi(21)
public void register() {
if (receiver != null) {
context.registerReceiver(receiver, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG));
}
listener.onAudioCapabilitiesChanged(DEFAULT_AUDIO_CAPABILITIES);
}
/** Unregisters to stop notifying the listener when audio capabilities change. */
public void unregister() {
if (receiver != null) {
context.unregisterReceiver(receiver);
}
}
@TargetApi(21)
private final class HdmiAudioPlugBroadcastReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
if (!action.equals(AudioManager.ACTION_HDMI_AUDIO_PLUG)) {
return;
}
listener.onAudioCapabilitiesChanged(
new AudioCapabilities(intent.getIntArrayExtra(AudioManager.EXTRA_ENCODINGS),
intent.getIntExtra(AudioManager.EXTRA_MAX_CHANNEL_COUNT, 0)));
}
}
}

View File

@ -0,0 +1,738 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.audio;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTimestamp;
import android.media.MediaFormat;
import android.os.ConditionVariable;
import android.util.Log;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
/**
* Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles
* playback position smoothing, non-blocking writes and reconfiguration.
*
* <p>If {@link #isInitialized} returns {@code false}, the instance can be {@link #initialize}d.
* After initialization, start playback by calling {@link #play}.
*
* <p>Call {@link #handleBuffer} to write data for playback.
*
* <p>Call {@link #handleDiscontinuity} when a buffer is skipped.
*
* <p>Call {@link #reconfigure} when the output format changes.
*
* <p>Call {@link #reset} to free resources. It is safe to re-{@link #initialize} the instance.
*/
@TargetApi(16)
public final class AudioTrack {
/**
 * Thrown when a failure occurs instantiating an {@link android.media.AudioTrack}.
 */
public static class InitializationException extends Exception {

  /** The state as reported by {@link android.media.AudioTrack#getState()}. */
  public final int audioTrackState;

  /**
   * @param audioTrackState The state as reported by {@link android.media.AudioTrack#getState()}.
   * @param sampleRate The requested sample rate.
   * @param channelConfig The requested channel configuration.
   * @param bufferSize The requested buffer size.
   */
  public InitializationException(
      int audioTrackState, int sampleRate, int channelConfig, int bufferSize) {
    super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", "
        + channelConfig + ", " + bufferSize + ")");
    this.audioTrackState = audioTrackState;
  }

}
/** Returned in the result of {@link #handleBuffer} if the buffer was discontinuous. */
public static final int RESULT_POSITION_DISCONTINUITY = 1;
/** Returned in the result of {@link #handleBuffer} if the buffer can be released. */
public static final int RESULT_BUFFER_CONSUMED = 2;
/** Represents an unset {@link android.media.AudioTrack} session identifier. */
public static final int SESSION_ID_NOT_SET = 0;
/** The default multiplication factor used when determining the size of the track's buffer. */
public static final float DEFAULT_MIN_BUFFER_MULTIPLICATION_FACTOR = 4;
/** Returned by {@link #getCurrentPositionUs} when the position is not set. */
public static final long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE;
private static final String TAG = "AudioTrack";
/**
* AudioTrack timestamps are deemed spurious if they are offset from the system clock by more
* than this amount.
*
* <p>This is a fail safe that should not be required on correctly functioning devices.
*/
private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 10 * C.MICROS_PER_SECOND;
/**
* AudioTrack latencies are deemed impossibly large if they are greater than this amount.
*
* <p>This is a fail safe that should not be required on correctly functioning devices.
*/
private static final long MAX_LATENCY_US = 10 * C.MICROS_PER_SECOND;
/** Value for ac3Bitrate before the bitrate has been calculated. */
private static final int UNKNOWN_AC3_BITRATE = 0;
private static final int START_NOT_SET = 0;
private static final int START_IN_SYNC = 1;
private static final int START_NEED_SYNC = 2;
private static final int MAX_PLAYHEAD_OFFSET_COUNT = 10;
private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000;
private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
private final ConditionVariable releasingConditionVariable;
private final AudioTimestampCompat audioTimestampCompat;
private final long[] playheadOffsets;
private final float minBufferMultiplicationFactor;
private android.media.AudioTrack audioTrack;
private int sampleRate;
private int channelConfig;
private int encoding;
private int frameSize;
private int minBufferSize;
private int bufferSize;
private int nextPlayheadOffsetIndex;
private int playheadOffsetCount;
private long smoothedPlayheadOffsetUs;
private long lastPlayheadSampleTimeUs;
private boolean audioTimestampSet;
private long lastTimestampSampleTimeUs;
private long lastRawPlaybackHeadPosition;
private long rawPlaybackHeadWrapCount;
private Method getLatencyMethod;
private long submittedBytes;
private int startMediaTimeState;
private long startMediaTimeUs;
private long resumeSystemTimeUs;
private long latencyUs;
private float volume;
private byte[] temporaryBuffer;
private int temporaryBufferOffset;
private int temporaryBufferSize;
private boolean isAc3;
/** Bitrate measured in kilobits per second, if {@link #isAc3} is true. */
private int ac3Bitrate;
/** Constructs an audio track using the default minimum buffer size multiplier. */
public AudioTrack() {
  this(DEFAULT_MIN_BUFFER_MULTIPLICATION_FACTOR);
}

/**
 * Constructs an audio track using the specified minimum buffer size multiplier.
 *
 * @param minBufferMultiplicationFactor Factor by which the minimum buffer size is multiplied.
 *     Must be at least 1.
 */
public AudioTrack(float minBufferMultiplicationFactor) {
  Assertions.checkArgument(minBufferMultiplicationFactor >= 1);
  this.minBufferMultiplicationFactor = minBufferMultiplicationFactor;
  // Initially open, so the first call to initialize() does not block.
  releasingConditionVariable = new ConditionVariable(true);
  // AudioTrack timestamps are only available from API 19; use a no-op compat shim below that.
  if (Util.SDK_INT >= 19) {
    audioTimestampCompat = new AudioTimestampCompatV19();
  } else {
    audioTimestampCompat = new NoopAudioTimestampCompat();
  }
  if (Util.SDK_INT >= 18) {
    // getLatency is looked up reflectively because it is not guaranteed to exist.
    try {
      getLatencyMethod =
          android.media.AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
    } catch (NoSuchMethodException e) {
      // There's no guarantee this method exists. Do nothing.
    }
  }
  playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
  volume = 1.0f;
  startMediaTimeState = START_NOT_SET;
}
/**
 * Returns whether the audio track has been successfully initialized via {@link #initialize} and
 * not yet {@link #reset}.
 */
public boolean isInitialized() {
  // The underlying platform track is non-null only between initialize() and reset().
  return audioTrack != null;
}
/**
 * Returns the playback position in the stream starting at zero, in microseconds, or
 * {@link #CURRENT_POSITION_NOT_SET} if it is not yet available.
 *
 * <p>If the device supports it, the method uses the playback timestamp from
 * {@link android.media.AudioTrack#getTimestamp}. Otherwise, it derives a smoothed position by
 * sampling the {@link android.media.AudioTrack}'s frame position.
 *
 * @param sourceEnded Specify {@code true} if no more input buffers will be provided.
 * @return The playback position relative to the start of playback, in microseconds.
 */
public long getCurrentPositionUs(boolean sourceEnded) {
  if (!hasCurrentPositionUs()) {
    return CURRENT_POSITION_NOT_SET;
  }
  if (audioTrack.getPlayState() == android.media.AudioTrack.PLAYSTATE_PLAYING) {
    // Only sample sync parameters (timestamp/playhead offsets) while actually playing.
    maybeSampleSyncParams();
  }
  long systemClockUs = System.nanoTime() / 1000;
  long currentPositionUs;
  if (audioTimestampSet) {
    // How long ago in the past the audio timestamp is (negative if it's in the future).
    long presentationDiff = systemClockUs - (audioTimestampCompat.getNanoTime() / 1000);
    long framesDiff = durationUsToFrames(presentationDiff);
    // The position of the frame that's currently being presented.
    long currentFramePosition = audioTimestampCompat.getFramePosition() + framesDiff;
    currentPositionUs = framesToDurationUs(currentFramePosition) + startMediaTimeUs;
  } else {
    if (playheadOffsetCount == 0) {
      // The AudioTrack has started, but we don't have any samples to compute a smoothed position.
      currentPositionUs = getPlaybackPositionUs() + startMediaTimeUs;
    } else {
      // getPlayheadPositionUs() only has a granularity of ~20ms, so we base the position off the
      // system clock (and a smoothed offset between it and the playhead position) so as to
      // prevent jitter in the reported positions.
      currentPositionUs = systemClockUs + smoothedPlayheadOffsetUs + startMediaTimeUs;
    }
    if (!sourceEnded) {
      // Account for the estimated output latency while more input is still expected.
      currentPositionUs -= latencyUs;
    }
  }
  return currentPositionUs;
}
/**
 * Initializes the audio track for writing new buffers using {@link #handleBuffer}.
 *
 * @return The audio track session identifier.
 * @throws InitializationException If the track fails to initialize.
 */
public int initialize() throws InitializationException {
  // Equivalent to initializing with a brand new audio session.
  return initialize(SESSION_ID_NOT_SET);
}
/**
 * Initializes the audio track for writing new buffers using {@link #handleBuffer}.
 *
 * @param sessionId Audio track session identifier to re-use, or {@link #SESSION_ID_NOT_SET} to
 *     create a new one.
 * @return The new (or re-used) session identifier.
 * @throws InitializationException If the track fails to initialize.
 */
public int initialize(int sessionId) throws InitializationException {
  // If we're asynchronously releasing a previous audio track then we block until it has been
  // released. This guarantees that we cannot end up in a state where we have multiple audio
  // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
  // the shared memory that's available for audio track buffers. This would in turn cause the
  // initialization of the audio track to fail.
  releasingConditionVariable.block();
  if (sessionId == SESSION_ID_NOT_SET) {
    audioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STREAM);
  } else {
    // Re-attach to the same audio session.
    audioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STREAM, sessionId);
  }
  // The only point in this method from which InitializationException can originate.
  checkAudioTrackInitialized();
  setVolume(volume);
  return audioTrack.getAudioSessionId();
}
  /**
   * Reconfigures the audio track to play back media in {@code format}. The encoding is assumed to
   * be {@link AudioFormat#ENCODING_PCM_16BIT}.
   *
   * @param format Specifies the channel count and sample rate to play back.
   */
  public void reconfigure(MediaFormat format) {
    // Delegate with 16-bit PCM and bufferSize 0 (i.e. derive the size from the platform minimum).
    reconfigure(format, AudioFormat.ENCODING_PCM_16BIT, 0);
  }
  /**
   * Reconfigures the audio track to play back media in {@code format}. Buffers passed to
   * {@link #handleBuffer} must use the specified {@code encoding}, which should be a constant
   * from {@link AudioFormat}.
   *
   * @param format Specifies the channel count and sample rate to play back.
   * @param encoding The format in which audio is represented.
   * @param bufferSize The total size of the playback buffer in bytes. Specify 0 to use a buffer
   *     size based on the minimum for format.
   */
  @SuppressLint("InlinedApi")
  public void reconfigure(MediaFormat format, int encoding, int bufferSize) {
    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int channelConfig;
    // Map the raw channel count onto the platform's output channel configuration constants.
    switch (channelCount) {
      case 1:
        channelConfig = AudioFormat.CHANNEL_OUT_MONO;
        break;
      case 2:
        channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
        break;
      case 6:
        channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
        break;
      case 8:
        channelConfig = AudioFormat.CHANNEL_OUT_7POINT1;
        break;
      default:
        throw new IllegalArgumentException("Unsupported channel count: " + channelCount);
    }
    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    // TODO: Does channelConfig determine channelCount?
    boolean isAc3 = encoding == AudioFormat.ENCODING_AC3 || encoding == AudioFormat.ENCODING_E_AC3;
    if (audioTrack != null && this.sampleRate == sampleRate
        && this.channelConfig == channelConfig && !this.isAc3 && !isAc3) {
      // We already have an existing audio track with the correct sample rate and channel config.
      return;
    }
    // Parameters changed (or AC-3 is involved): tear down any existing track, then update state.
    reset();
    minBufferSize = android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, encoding);
    this.encoding = encoding;
    this.bufferSize =
        bufferSize == 0 ? (int) (minBufferMultiplicationFactor * minBufferSize) : bufferSize;
    this.sampleRate = sampleRate;
    this.channelConfig = channelConfig;
    this.isAc3 = isAc3;
    ac3Bitrate = UNKNOWN_AC3_BITRATE; // Calculated on receiving the first buffer if isAc3 is true.
    frameSize = 2 * channelCount; // 2 bytes per 16 bit sample * number of channels.
  }
/** Starts/resumes playing audio if the audio track has been initialized. */
public void play() {
if (isInitialized()) {
resumeSystemTimeUs = System.nanoTime() / 1000;
audioTrack.play();
}
}
  /** Signals to the audio track that the next buffer is discontinuous with the previous buffer. */
  public void handleDiscontinuity() {
    // Force resynchronization after a skipped buffer.
    // handleBuffer() will realign startMediaTimeUs with the next buffer's start time.
    if (startMediaTimeState == START_IN_SYNC) {
      startMediaTimeState = START_NEED_SYNC;
    }
  }
  /**
   * Attempts to write {@code size} bytes from {@code buffer} at {@code offset} to the audio track.
   * Returns a bit field containing {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released
   * (due to having been written), and {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was
   * discontinuous with previously written data.
   *
   * @param buffer The buffer containing audio data to play back.
   * @param offset The offset in the buffer from which to consume data.
   * @param size The number of bytes to consume from {@code buffer}.
   * @param presentationTimeUs Presentation timestamp of the next buffer in microseconds.
   * @return A bit field with {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released, and
   *     {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was not contiguous with previously
   *     written data.
   */
  public int handleBuffer(ByteBuffer buffer, int offset, int size, long presentationTimeUs) {
    int result = 0;
    if (temporaryBufferSize == 0 && size != 0) {
      if (isAc3 && ac3Bitrate == UNKNOWN_AC3_BITRATE) {
        // Each AC-3 buffer contains 1536 frames of audio, so the AudioTrack playback position
        // advances by 1536 per buffer (32 ms at 48 kHz). Calculate the bitrate in kbit/s.
        int unscaledAc3Bitrate = size * 8 * sampleRate;
        int divisor = 1000 * 1536;
        ac3Bitrate = (unscaledAc3Bitrate + divisor / 2) / divisor;
      }
      // This is the first time we've seen this {@code buffer}.
      // Note: presentationTimeUs corresponds to the end of the sample, not the start.
      long bufferStartTime = presentationTimeUs - framesToDurationUs(bytesToFrames(size));
      if (startMediaTimeUs == START_NOT_SET) {
        startMediaTimeUs = Math.max(0, bufferStartTime);
        startMediaTimeState = START_IN_SYNC;
      } else {
        // Sanity check that bufferStartTime is consistent with the expected value.
        long expectedBufferStartTime = startMediaTimeUs
            + framesToDurationUs(bytesToFrames(submittedBytes));
        if (startMediaTimeState == START_IN_SYNC
            && Math.abs(expectedBufferStartTime - bufferStartTime) > 200000) {
          Log.e(TAG, "Discontinuity detected [expected " + expectedBufferStartTime + ", got "
              + bufferStartTime + "]");
          startMediaTimeState = START_NEED_SYNC;
        }
        if (startMediaTimeState == START_NEED_SYNC) {
          // Adjust startMediaTimeUs to be consistent with the current buffer's start time and the
          // number of bytes submitted.
          startMediaTimeUs += (bufferStartTime - expectedBufferStartTime);
          startMediaTimeState = START_IN_SYNC;
          result = RESULT_POSITION_DISCONTINUITY;
        }
      }
    }
    if (size == 0) {
      // Nothing to write; any discontinuity detected above is already reflected in result.
      return result;
    }
    if (temporaryBufferSize == 0) {
      // Start consuming this buffer.
      temporaryBufferSize = size;
      buffer.position(offset);
      if (Util.SDK_INT < 21) {
        // Copy {@code buffer} into {@code temporaryBuffer}.
        if (temporaryBuffer == null || temporaryBuffer.length < size) {
          temporaryBuffer = new byte[size];
        }
        buffer.get(temporaryBuffer, 0, size);
        temporaryBufferOffset = 0;
      }
    }
    int bytesWritten = 0;
    if (Util.SDK_INT < 21) {
      // Work out how many bytes we can write without the risk of blocking.
      int bytesPending = (int) (submittedBytes - framesToBytes(getPlaybackPositionFrames()));
      int bytesToWrite = bufferSize - bytesPending;
      if (bytesToWrite > 0) {
        bytesToWrite = Math.min(temporaryBufferSize, bytesToWrite);
        bytesWritten = audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
        if (bytesWritten < 0) {
          Log.w(TAG, "AudioTrack.write returned error code: " + bytesWritten);
        } else {
          temporaryBufferOffset += bytesWritten;
        }
      }
    } else {
      // API 21+ supports non-blocking writes directly from the ByteBuffer.
      bytesWritten = writeNonBlockingV21(audioTrack, buffer, temporaryBufferSize);
    }
    temporaryBufferSize -= bytesWritten;
    submittedBytes += bytesWritten;
    if (temporaryBufferSize == 0) {
      // The entire buffer has been written, so the caller may release it.
      result |= RESULT_BUFFER_CONSUMED;
    }
    return result;
  }
  /**
   * Writes up to {@code size} bytes from {@code buffer} to {@code audioTrack} without blocking,
   * using the {@link java.nio.ByteBuffer} write overload added in SDK 21.
   *
   * @return The number of bytes written, or a negative platform error code.
   */
  @TargetApi(21)
  private static int writeNonBlockingV21(
      android.media.AudioTrack audioTrack, ByteBuffer buffer, int size) {
    return audioTrack.write(buffer, size, android.media.AudioTrack.WRITE_NON_BLOCKING);
  }
  /** Returns whether the audio track has more data pending that will be played back. */
  public boolean hasPendingData() {
    // Data is pending while more frames have been submitted than the track has played out.
    return audioTrack != null && bytesToFrames(submittedBytes) > getPlaybackPositionFrames();
  }
  /** Returns whether enough data has been supplied via {@link #handleBuffer} to begin playback. */
  public boolean hasEnoughDataToBeginPlayback() {
    // The platform requires at least the minimum buffer size to be written before playback starts.
    return submittedBytes >= minBufferSize;
  }
/** Sets the playback volume. */
public void setVolume(float volume) {
this.volume = volume;
if (audioTrack != null) {
if (Util.SDK_INT >= 21) {
setVolumeV21(audioTrack, volume);
} else {
setVolumeV3(audioTrack, volume);
}
}
}
  /** Sets the volume using the single per-track volume setter added in SDK 21. */
  @TargetApi(21)
  private static void setVolumeV21(android.media.AudioTrack audioTrack, float volume) {
    audioTrack.setVolume(volume);
  }
  /** Sets the volume on pre-21 SDKs via the deprecated per-channel stereo volume setter. */
  @SuppressWarnings("deprecation")
  private static void setVolumeV3(android.media.AudioTrack audioTrack, float volume) {
    // Apply the same gain to both channels.
    audioTrack.setStereoVolume(volume, volume);
  }
  /** Pauses playback. */
  public void pause() {
    if (audioTrack != null) {
      // Discard smoothing/timestamp state, which becomes stale across a pause.
      resetSyncParams();
      audioTrack.pause();
    }
  }
  /**
   * Releases resources associated with this instance asynchronously. Calling {@link #initialize}
   * will block until the audio track has been released, so it is safe to initialize immediately
   * after resetting.
   */
  public void reset() {
    if (audioTrack != null) {
      // Clear write/position bookkeeping so a subsequent track starts from a clean slate.
      submittedBytes = 0;
      temporaryBufferSize = 0;
      lastRawPlaybackHeadPosition = 0;
      rawPlaybackHeadWrapCount = 0;
      startMediaTimeUs = START_NOT_SET;
      resetSyncParams();
      int playState = audioTrack.getPlayState();
      if (playState == android.media.AudioTrack.PLAYSTATE_PLAYING) {
        audioTrack.pause();
      }
      // AudioTrack.release can take some time, so we call it on a background thread. The condition
      // variable is opened once the release completes, unblocking any waiting initialize() call.
      final android.media.AudioTrack toRelease = audioTrack;
      audioTrack = null;
      releasingConditionVariable.close();
      new Thread() {
        @Override
        public void run() {
          try {
            toRelease.release();
          } finally {
            releasingConditionVariable.open();
          }
        }
      }.start();
    }
  }
  /** Returns whether {@link #getCurrentPositionUs} can return the current playback position. */
  private boolean hasCurrentPositionUs() {
    // A position exists once the track is initialized and a first buffer has fixed the start time.
    return isInitialized() && startMediaTimeUs != START_NOT_SET;
  }
  /**
   * Updates the audio track latency and playback position parameters.
   *
   * <p>Rate-limited internally: playhead offsets are sampled at most once per
   * {@code MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US}, and timestamps/latency at most once per
   * {@code MIN_TIMESTAMP_SAMPLE_INTERVAL_US}.
   */
  private void maybeSampleSyncParams() {
    long playbackPositionUs = getPlaybackPositionUs();
    if (playbackPositionUs == 0) {
      // The AudioTrack hasn't output anything yet.
      return;
    }
    long systemClockUs = System.nanoTime() / 1000;
    if (systemClockUs - lastPlayheadSampleTimeUs >= MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US) {
      // Take a new sample and update the smoothed offset between the system clock and the playhead.
      playheadOffsets[nextPlayheadOffsetIndex] = playbackPositionUs - systemClockUs;
      nextPlayheadOffsetIndex = (nextPlayheadOffsetIndex + 1) % MAX_PLAYHEAD_OFFSET_COUNT;
      if (playheadOffsetCount < MAX_PLAYHEAD_OFFSET_COUNT) {
        playheadOffsetCount++;
      }
      lastPlayheadSampleTimeUs = systemClockUs;
      // Recompute the smoothed offset as the mean of the stored samples.
      smoothedPlayheadOffsetUs = 0;
      for (int i = 0; i < playheadOffsetCount; i++) {
        smoothedPlayheadOffsetUs += playheadOffsets[i] / playheadOffsetCount;
      }
    }
    if (systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
      audioTimestampSet = audioTimestampCompat.update(audioTrack);
      if (audioTimestampSet) {
        // Perform sanity checks on the timestamp.
        long audioTimestampUs = audioTimestampCompat.getNanoTime() / 1000;
        if (audioTimestampUs < resumeSystemTimeUs) {
          // The timestamp corresponds to a time before the track was most recently resumed.
          audioTimestampSet = false;
        } else if (Math.abs(audioTimestampUs - systemClockUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
          // The timestamp time base is probably wrong.
          audioTimestampSet = false;
          Log.w(TAG, "Spurious audio timestamp: " + audioTimestampCompat.getFramePosition() + ", "
              + audioTimestampUs + ", " + systemClockUs);
        }
      }
      if (getLatencyMethod != null) {
        try {
          // Compute the audio track latency, excluding the latency due to the buffer (leaving
          // latency due to the mixer and audio hardware driver). getLatency() is a hidden API,
          // hence the reflective invocation.
          latencyUs = (Integer) getLatencyMethod.invoke(audioTrack, (Object[]) null) * 1000L
              - framesToDurationUs(bytesToFrames(bufferSize));
          // Sanity check that the latency is non-negative.
          latencyUs = Math.max(latencyUs, 0);
          // Sanity check that the latency isn't too large.
          if (latencyUs > MAX_LATENCY_US) {
            Log.w(TAG, "Ignoring impossibly large audio latency: " + latencyUs);
            latencyUs = 0;
          }
        } catch (Exception e) {
          // The method existed, but doesn't work. Don't try again.
          getLatencyMethod = null;
        }
      }
      lastTimestampSampleTimeUs = systemClockUs;
    }
  }
/**
* Checks that {@link #audioTrack} has been successfully initialized. If it has then calling this
* method is a no-op. If it hasn't then {@link #audioTrack} is released and set to null, and an
* exception is thrown.
*
* @throws InitializationException If {@link #audioTrack} has not been successfully initialized.
*/
private void checkAudioTrackInitialized() throws InitializationException {
int state = audioTrack.getState();
if (state == android.media.AudioTrack.STATE_INITIALIZED) {
return;
}
// The track is not successfully initialized. Release and null the track.
try {
audioTrack.release();
} catch (Exception e) {
// The track has already failed to initialize, so it wouldn't be that surprising if release
// were to fail too. Swallow the exception.
} finally {
audioTrack = null;
}
throw new InitializationException(state, sampleRate, channelConfig, bufferSize);
}
  /**
   * {@link android.media.AudioTrack#getPlaybackHeadPosition()} returns a value intended to be
   * interpreted as an unsigned 32 bit integer, which also wraps around periodically. This method
   * returns the playback head position as a long that will only wrap around if the value exceeds
   * {@link Long#MAX_VALUE} (which in practice will never happen).
   *
   * @return {@link android.media.AudioTrack#getPlaybackHeadPosition()} of {@link #audioTrack}
   *     expressed as a long.
   */
  private long getPlaybackPositionFrames() {
    // Mask to reinterpret the signed int as an unsigned 32-bit value.
    long rawPlaybackHeadPosition = 0xFFFFFFFFL & audioTrack.getPlaybackHeadPosition();
    if (lastRawPlaybackHeadPosition > rawPlaybackHeadPosition) {
      // The value must have wrapped around.
      rawPlaybackHeadWrapCount++;
    }
    lastRawPlaybackHeadPosition = rawPlaybackHeadPosition;
    // NOTE(review): assumes rawPlaybackHeadWrapCount is declared as a long — shifting an int by 32
    // would be a no-op. Confirm the field declaration.
    return rawPlaybackHeadPosition + (rawPlaybackHeadWrapCount << 32);
  }
  /** Returns the playback head position of {@link #audioTrack}, in microseconds. */
  private long getPlaybackPositionUs() {
    return framesToDurationUs(getPlaybackPositionFrames());
  }
  /** Converts a frame count to the equivalent byte count, using the PCM frame size. */
  private long framesToBytes(long frameCount) {
    // This method is unused on SDK >= 21.
    return frameCount * frameSize;
  }
private long bytesToFrames(long byteCount) {
if (isAc3) {
return byteCount * 8 * sampleRate / (1000 * ac3Bitrate);
} else {
return byteCount / frameSize;
}
}
  /** Converts a frame count to the equivalent duration in microseconds at the track sample rate. */
  private long framesToDurationUs(long frameCount) {
    return (frameCount * C.MICROS_PER_SECOND) / sampleRate;
  }
  /** Converts a duration in microseconds to the equivalent frame count at the track sample rate. */
  private long durationUsToFrames(long durationUs) {
    return (durationUs * sampleRate) / C.MICROS_PER_SECOND;
  }
  /** Clears the playhead smoothing samples and timestamp state used for position reporting. */
  private void resetSyncParams() {
    smoothedPlayheadOffsetUs = 0;
    playheadOffsetCount = 0;
    nextPlayheadOffsetIndex = 0;
    lastPlayheadSampleTimeUs = 0;
    audioTimestampSet = false;
    lastTimestampSampleTimeUs = 0;
  }
  /**
   * Interface exposing the {@link android.media.AudioTimestamp} methods we need that were added in
   * SDK 19.
   */
  private interface AudioTimestampCompat {
    /**
     * Returns true if the audioTimestamp was retrieved from the audioTrack.
     */
    boolean update(android.media.AudioTrack audioTrack);
    /** Returns the system time of the most recently retrieved timestamp, in nanoseconds. */
    long getNanoTime();
    /** Returns the frame position of the most recently retrieved timestamp. */
    long getFramePosition();
  }
  /**
   * The AudioTimestampCompat implementation for SDK < 19 that does nothing or throws an exception.
   */
  private static final class NoopAudioTimestampCompat implements AudioTimestampCompat {
    @Override
    public boolean update(android.media.AudioTrack audioTrack) {
      // Timestamps are unavailable before SDK 19.
      return false;
    }
    @Override
    public long getNanoTime() {
      // Should never be called if update() returned false.
      throw new UnsupportedOperationException();
    }
    @Override
    public long getFramePosition() {
      // Should never be called if update() returned false.
      throw new UnsupportedOperationException();
    }
  }
  /**
   * The AudioTimestampCompat implementation for SDK >= 19 that simply calls through to the actual
   * implementations added in SDK 19.
   */
  @TargetApi(19)
  private static final class AudioTimestampCompatV19 implements AudioTimestampCompat {
    // Reused across update() calls; getTimestamp() fills it in place.
    private final AudioTimestamp audioTimestamp;
    public AudioTimestampCompatV19() {
      audioTimestamp = new AudioTimestamp();
    }
    @Override
    public boolean update(android.media.AudioTrack audioTrack) {
      return audioTrack.getTimestamp(audioTimestamp);
    }
    @Override
    public long getNanoTime() {
      return audioTimestamp.nanoTime;
    }
    @Override
    public long getFramePosition() {
      return audioTimestamp.framePosition;
    }
  }
}

View File

@ -134,18 +134,6 @@ public abstract class Chunk implements Loadable {
consumeStream(dataSourceStream);
}
/**
* Returns a byte array containing the loaded data. If the chunk is partially loaded, this
* method returns the data that has been loaded so far. If nothing has been loaded, null is
* returned.
*
* @return The loaded data or null.
*/
public final byte[] getLoadedData() {
Assertions.checkState(dataSourceStream != null);
return dataSourceStream.getLoadedData();
}
/**
* Invoked by {@link #consume()}. Implementations may override this method if they wish to
* consume the loaded data at this point.

View File

@ -24,6 +24,7 @@ import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable;
import com.google.android.exoplayer.util.Assertions;
import android.os.Handler;
@ -39,7 +40,7 @@ import java.util.List;
* A {@link SampleSource} that loads media in {@link Chunk}s, which are themselves obtained from a
* {@link ChunkSource}.
*/
public class ChunkSampleSource implements SampleSource, Loader.Listener {
public class ChunkSampleSource implements SampleSource, Loader.Callback {
/**
* Interface definition for a callback to be notified of {@link ChunkSampleSource} events.
@ -133,6 +134,11 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
/**
* The default minimum number of times to retry loading data prior to failing.
*/
public static final int DEFAULT_MIN_LOADABLE_RETRY_COUNT = 1;
private static final int STATE_UNPREPARED = 0;
private static final int STATE_PREPARED = 1;
private static final int STATE_ENABLED = 2;
@ -149,11 +155,12 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
private final boolean frameAccurateSeeking;
private final Handler eventHandler;
private final EventListener eventListener;
private final int minLoadableRetryCount;
private int state;
private long downstreamPositionUs;
private long lastSeekPositionUs;
private long pendingResetTime;
private long pendingResetPositionUs;
private long lastPerformedBufferOperation;
private boolean pendingDiscontinuity;
@ -174,6 +181,13 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
public ChunkSampleSource(ChunkSource chunkSource, LoadControl loadControl,
int bufferSizeContribution, boolean frameAccurateSeeking, Handler eventHandler,
EventListener eventListener, int eventSourceId) {
this(chunkSource, loadControl, bufferSizeContribution, frameAccurateSeeking, eventHandler,
eventListener, eventSourceId, DEFAULT_MIN_LOADABLE_RETRY_COUNT);
}
public ChunkSampleSource(ChunkSource chunkSource, LoadControl loadControl,
int bufferSizeContribution, boolean frameAccurateSeeking, Handler eventHandler,
EventListener eventListener, int eventSourceId, int minLoadableRetryCount) {
this.chunkSource = chunkSource;
this.loadControl = loadControl;
this.bufferSizeContribution = bufferSizeContribution;
@ -181,6 +195,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
this.eventHandler = eventHandler;
this.eventListener = eventListener;
this.eventSourceId = eventSourceId;
this.minLoadableRetryCount = minLoadableRetryCount;
currentLoadableHolder = new ChunkOperationHolder();
mediaChunks = new LinkedList<MediaChunk>();
readOnlyMediaChunks = Collections.unmodifiableList(mediaChunks);
@ -199,7 +214,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
@Override
public boolean prepare() {
Assertions.checkState(state == STATE_UNPREPARED);
loader = new Loader("Loader:" + chunkSource.getTrackInfo().mimeType, this);
loader = new Loader("Loader:" + chunkSource.getTrackInfo().mimeType);
state = STATE_PREPARED;
return true;
}
@ -218,7 +233,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
@Override
public void enable(int track, long timeUs) {
public void enable(int track, long positionUs) {
Assertions.checkState(state == STATE_PREPARED);
Assertions.checkState(track == 0);
state = STATE_ENABLED;
@ -226,9 +241,9 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
loadControl.register(this, bufferSizeContribution);
downstreamFormat = null;
downstreamMediaFormat = null;
downstreamPositionUs = timeUs;
lastSeekPositionUs = timeUs;
restartFrom(timeUs);
downstreamPositionUs = positionUs;
lastSeekPositionUs = positionUs;
restartFrom(positionUs);
}
@Override
@ -237,22 +252,25 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
Assertions.checkState(track == 0);
pendingDiscontinuity = false;
state = STATE_PREPARED;
loadControl.unregister(this);
chunkSource.disable(mediaChunks);
if (loader.isLoading()) {
loader.cancelLoading();
} else {
clearMediaChunks();
clearCurrentLoadable();
loadControl.trimAllocator();
try {
chunkSource.disable(mediaChunks);
} finally {
loadControl.unregister(this);
if (loader.isLoading()) {
loader.cancelLoading();
} else {
clearMediaChunks();
clearCurrentLoadable();
loadControl.trimAllocator();
}
}
}
@Override
public boolean continueBuffering(long playbackPositionUs) throws IOException {
public boolean continueBuffering(long positionUs) throws IOException {
Assertions.checkState(state == STATE_ENABLED);
downstreamPositionUs = playbackPositionUs;
chunkSource.continueBuffering(playbackPositionUs);
downstreamPositionUs = positionUs;
chunkSource.continueBuffering(positionUs);
updateLoadControl();
if (isPendingReset() || mediaChunks.isEmpty()) {
return false;
@ -267,7 +285,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
@Override
public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder,
public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
SampleHolder sampleHolder, boolean onlyReadDiscontinuity) throws IOException {
Assertions.checkState(state == STATE_ENABLED);
Assertions.checkState(track == 0);
@ -281,11 +299,9 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
return NOTHING_READ;
}
downstreamPositionUs = playbackPositionUs;
downstreamPositionUs = positionUs;
if (isPendingReset()) {
if (currentLoadableException != null) {
throw currentLoadableException;
}
maybeThrowLoadableException();
IOException chunkSourceException = chunkSource.getError();
if (chunkSourceException != null) {
throw chunkSourceException;
@ -300,7 +316,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
discardDownstreamMediaChunk();
mediaChunk = mediaChunks.getFirst();
mediaChunk.seekToStart();
return readData(track, playbackPositionUs, formatHolder, sampleHolder, false);
return readData(track, positionUs, formatHolder, sampleHolder, false);
} else if (mediaChunk.isLastChunk()) {
return END_OF_STREAM;
}
@ -338,40 +354,44 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
onSampleRead(mediaChunk, sampleHolder);
return SAMPLE_READ;
} else {
if (currentLoadableException != null) {
throw currentLoadableException;
}
maybeThrowLoadableException();
return NOTHING_READ;
}
}
@Override
public void seekToUs(long timeUs) {
public void seekToUs(long positionUs) {
Assertions.checkState(state == STATE_ENABLED);
downstreamPositionUs = timeUs;
lastSeekPositionUs = timeUs;
if (pendingResetTime == timeUs) {
downstreamPositionUs = positionUs;
lastSeekPositionUs = positionUs;
if (pendingResetPositionUs == positionUs) {
return;
}
MediaChunk mediaChunk = getMediaChunk(timeUs);
MediaChunk mediaChunk = getMediaChunk(positionUs);
if (mediaChunk == null) {
restartFrom(timeUs);
restartFrom(positionUs);
pendingDiscontinuity = true;
} else {
pendingDiscontinuity |= mediaChunk.seekTo(timeUs, mediaChunk == mediaChunks.getFirst());
pendingDiscontinuity |= mediaChunk.seekTo(positionUs, mediaChunk == mediaChunks.getFirst());
discardDownstreamMediaChunks(mediaChunk);
updateLoadControl();
}
}
private MediaChunk getMediaChunk(long timeUs) {
private void maybeThrowLoadableException() throws IOException {
if (currentLoadableException != null && currentLoadableExceptionCount > minLoadableRetryCount) {
throw currentLoadableException;
}
}
private MediaChunk getMediaChunk(long positionUs) {
Iterator<MediaChunk> mediaChunkIterator = mediaChunks.iterator();
while (mediaChunkIterator.hasNext()) {
MediaChunk mediaChunk = mediaChunkIterator.next();
if (timeUs < mediaChunk.startTimeUs) {
if (positionUs < mediaChunk.startTimeUs) {
return null;
} else if (mediaChunk.isLastChunk() || timeUs < mediaChunk.endTimeUs) {
} else if (mediaChunk.isLastChunk() || positionUs < mediaChunk.endTimeUs) {
return mediaChunk;
}
}
@ -382,7 +402,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
public long getBufferedPositionUs() {
Assertions.checkState(state == STATE_ENABLED);
if (isPendingReset()) {
return pendingResetTime;
return pendingResetPositionUs;
}
MediaChunk mediaChunk = mediaChunks.getLast();
Chunk currentLoadable = currentLoadableHolder.chunk;
@ -413,7 +433,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
@Override
public void onLoaded() {
public void onLoadCompleted(Loadable loadable) {
Chunk currentLoadable = currentLoadableHolder.chunk;
notifyLoadCompleted(currentLoadable.bytesLoaded());
try {
@ -436,7 +456,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
@Override
public void onCanceled() {
public void onLoadCanceled(Loadable loadable) {
Chunk currentLoadable = currentLoadableHolder.chunk;
notifyLoadCanceled(currentLoadable.bytesLoaded());
if (!isMediaChunk(currentLoadable)) {
@ -444,7 +464,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
clearCurrentLoadable();
if (state == STATE_ENABLED) {
restartFrom(pendingResetTime);
restartFrom(pendingResetPositionUs);
} else {
clearMediaChunks();
loadControl.trimAllocator();
@ -452,7 +472,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
@Override
public void onError(IOException e) {
public void onLoadError(Loadable loadable, IOException e) {
currentLoadableException = e;
currentLoadableExceptionCount++;
currentLoadableExceptionTimestamp = SystemClock.elapsedRealtime();
@ -472,8 +492,8 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
// no-op
}
private void restartFrom(long timeUs) {
pendingResetTime = timeUs;
private void restartFrom(long positionUs) {
pendingResetPositionUs = positionUs;
if (loader.isLoading()) {
loader.cancelLoading();
} else {
@ -495,23 +515,40 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
private void updateLoadControl() {
long loadPositionUs;
if (isPendingReset()) {
loadPositionUs = pendingResetTime;
} else {
MediaChunk lastMediaChunk = mediaChunks.getLast();
loadPositionUs = lastMediaChunk.nextChunkIndex == -1 ? -1 : lastMediaChunk.endTimeUs;
}
boolean isBackedOff = currentLoadableException != null && !currentLoadableExceptionFatal;
boolean nextLoader = loadControl.update(this, downstreamPositionUs, loadPositionUs,
isBackedOff || loader.isLoading(), currentLoadableExceptionFatal);
if (currentLoadableExceptionFatal) {
// We've failed, but we still need to update the control with our current state.
loadControl.update(this, downstreamPositionUs, -1, false, true);
return;
}
long now = SystemClock.elapsedRealtime();
long nextLoadPositionUs = getNextLoadPositionUs();
boolean isBackedOff = currentLoadableException != null;
boolean loadingOrBackedOff = loader.isLoading() || isBackedOff;
// If we're not loading or backed off, evaluate the operation if (a) we don't have the next
// chunk yet and we're not finished, or (b) if the last evaluation was over 2000ms ago.
if (!loadingOrBackedOff && ((currentLoadableHolder.chunk == null && nextLoadPositionUs != -1)
|| (now - lastPerformedBufferOperation > 2000))) {
// Perform the evaluation.
lastPerformedBufferOperation = now;
currentLoadableHolder.queueSize = readOnlyMediaChunks.size();
chunkSource.getChunkOperation(readOnlyMediaChunks, pendingResetPositionUs,
downstreamPositionUs, currentLoadableHolder);
boolean chunksDiscarded = discardUpstreamMediaChunks(currentLoadableHolder.queueSize);
// Update the next load position as appropriate.
if (currentLoadableHolder.chunk == null) {
// Set loadPosition to -1 to indicate that we don't have anything to load.
nextLoadPositionUs = -1;
} else if (chunksDiscarded) {
// Chunks were discarded, so we need to re-evaluate the load position.
nextLoadPositionUs = getNextLoadPositionUs();
}
}
// Update the control with our current state, and determine whether we're the next loader.
boolean nextLoader = loadControl.update(this, downstreamPositionUs, nextLoadPositionUs,
loadingOrBackedOff, false);
if (isBackedOff) {
long elapsedMillis = now - currentLoadableExceptionTimestamp;
@ -521,17 +558,21 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
return;
}
if (!loader.isLoading()) {
if (currentLoadableHolder.chunk == null || now - lastPerformedBufferOperation > 1000) {
lastPerformedBufferOperation = now;
currentLoadableHolder.queueSize = readOnlyMediaChunks.size();
chunkSource.getChunkOperation(readOnlyMediaChunks, pendingResetTime, downstreamPositionUs,
currentLoadableHolder);
discardUpstreamMediaChunks(currentLoadableHolder.queueSize);
}
if (nextLoader) {
maybeStartLoading();
}
if (!loader.isLoading() && nextLoader) {
maybeStartLoading();
}
}
/**
* Gets the next load time, assuming that the next load starts where the previous chunk ended (or
* from the pending reset time, if there is one).
*/
private long getNextLoadPositionUs() {
if (isPendingReset()) {
return pendingResetPositionUs;
} else {
MediaChunk lastMediaChunk = mediaChunks.getLast();
return lastMediaChunk.nextChunkIndex == -1 ? -1 : lastMediaChunk.endTimeUs;
}
}
@ -548,12 +589,12 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
Chunk backedOffChunk = currentLoadableHolder.chunk;
if (!isMediaChunk(backedOffChunk)) {
currentLoadableHolder.queueSize = readOnlyMediaChunks.size();
chunkSource.getChunkOperation(readOnlyMediaChunks, pendingResetTime, downstreamPositionUs,
currentLoadableHolder);
chunkSource.getChunkOperation(readOnlyMediaChunks, pendingResetPositionUs,
downstreamPositionUs, currentLoadableHolder);
discardUpstreamMediaChunks(currentLoadableHolder.queueSize);
if (currentLoadableHolder.chunk == backedOffChunk) {
// Chunk was unchanged. Resume loading.
loader.startLoading(backedOffChunk);
loader.startLoading(backedOffChunk, this);
} else {
backedOffChunk.release();
maybeStartLoading();
@ -564,7 +605,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
if (backedOffChunk == mediaChunks.getFirst()) {
// We're not able to clear the first media chunk, so we have no choice but to continue
// loading it.
loader.startLoading(backedOffChunk);
loader.startLoading(backedOffChunk, this);
return;
}
@ -573,13 +614,13 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
MediaChunk removedChunk = mediaChunks.removeLast();
Assertions.checkState(backedOffChunk == removedChunk);
currentLoadableHolder.queueSize = readOnlyMediaChunks.size();
chunkSource.getChunkOperation(readOnlyMediaChunks, pendingResetTime, downstreamPositionUs,
chunkSource.getChunkOperation(readOnlyMediaChunks, pendingResetPositionUs, downstreamPositionUs,
currentLoadableHolder);
mediaChunks.add(removedChunk);
if (currentLoadableHolder.chunk == backedOffChunk) {
// Chunk was unchanged. Resume loading.
loader.startLoading(backedOffChunk);
loader.startLoading(backedOffChunk, this);
} else {
// This call will remove and release at least one chunk from the end of mediaChunks. Since
// the current loadable is the last media chunk, it is guaranteed to be removed.
@ -599,8 +640,8 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
if (isMediaChunk(currentLoadable)) {
MediaChunk mediaChunk = (MediaChunk) currentLoadable;
if (isPendingReset()) {
mediaChunk.seekTo(pendingResetTime, false);
pendingResetTime = NO_RESET_PENDING;
mediaChunk.seekTo(pendingResetPositionUs, false);
pendingResetPositionUs = NO_RESET_PENDING;
}
mediaChunks.add(mediaChunk);
notifyLoadStarted(mediaChunk.format.id, mediaChunk.trigger, false,
@ -609,7 +650,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
notifyLoadStarted(currentLoadable.format.id, currentLoadable.trigger, true, -1, -1,
currentLoadable.getLength());
}
loader.startLoading(currentLoadable);
loader.startLoading(currentLoadable, this);
}
/**
@ -648,10 +689,11 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
* Discard upstream media chunks until the queue length is equal to the length specified.
*
* @param queueLength The desired length of the queue.
* @return True if chunks were discarded. False otherwise.
*/
private void discardUpstreamMediaChunks(int queueLength) {
private boolean discardUpstreamMediaChunks(int queueLength) {
if (mediaChunks.size() <= queueLength) {
return;
return false;
}
long totalBytes = 0;
long startTimeUs = 0;
@ -663,6 +705,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
removed.release();
}
notifyUpstreamDiscarded(startTimeUs, endTimeUs, totalBytes);
return true;
}
private boolean isMediaChunk(Chunk chunk) {
@ -670,7 +713,7 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
private boolean isPendingReset() {
return pendingResetTime != NO_RESET_PENDING;
return pendingResetPositionUs != NO_RESET_PENDING;
}
private long getRetryDelayMillis(long errorCount) {
@ -753,13 +796,13 @@ public class ChunkSampleSource implements SampleSource, Loader.Listener {
}
private void notifyDownstreamFormatChanged(final String formatId, final int trigger,
final long mediaTimeUs) {
final long positionUs) {
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onDownstreamFormatChanged(eventSourceId, formatId, trigger,
usToMs(mediaTimeUs));
usToMs(positionUs));
}
});
}

View File

@ -45,6 +45,8 @@ public interface ChunkSource {
* the supplied {@link MediaFormat}. Other implementations do nothing.
* <p>
* Only called when the source is enabled.
*
* @param out The {@link MediaFormat} on which the maximum video dimensions should be set.
*/
void getMaxVideoDimensions(MediaFormat out);

View File

@ -40,6 +40,17 @@ public final class Mp4MediaChunk extends MediaChunk {
private MediaFormat mediaFormat;
private Map<UUID, byte[]> psshInfo;
/**
 * Constructor retained for backward compatibility; delegates to the primary constructor with no
 * pssh data.
 *
 * @deprecated Use the other constructor, passing null as {@code psshInfo}.
 */
@Deprecated
public Mp4MediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex,
Extractor extractor, boolean maybeSelfContained, long sampleOffsetUs) {
this(dataSource, dataSpec, format, trigger, startTimeUs, endTimeUs, nextChunkIndex,
extractor, null, maybeSelfContained, sampleOffsetUs);
}
/**
* @param dataSource A {@link DataSource} for loading the data.
* @param dataSpec Defines the data to be loaded.
@ -49,6 +60,8 @@ public final class Mp4MediaChunk extends MediaChunk {
* @param endTimeUs The end time of the media contained by the chunk, in microseconds.
* @param nextChunkIndex The index of the next chunk, or -1 if this is the last chunk.
* @param extractor The extractor that will be used to extract the samples.
* @param psshInfo Pssh data. May be null if pssh data is present within the stream, meaning it
* can be obtained directly from {@code extractor}, or if no pssh data is required.
* @param maybeSelfContained Set to true if this chunk might be self contained, meaning it might
* contain a moov atom defining the media format of the chunk. This parameter can always be
* safely set to true. Setting to false where the chunk is known to not be self contained may
@ -56,12 +69,13 @@ public final class Mp4MediaChunk extends MediaChunk {
* @param sampleOffsetUs An offset to subtract from the sample timestamps parsed by the extractor.
*/
public Mp4MediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex,
Extractor extractor, boolean maybeSelfContained, long sampleOffsetUs) {
int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex, Extractor extractor,
Map<UUID, byte[]> psshInfo, boolean maybeSelfContained, long sampleOffsetUs) {
super(dataSource, dataSpec, format, trigger, startTimeUs, endTimeUs, nextChunkIndex);
this.extractor = extractor;
this.maybeSelfContained = maybeSelfContained;
this.sampleOffsetUs = sampleOffsetUs;
this.psshInfo = psshInfo;
}
@Override
@ -96,8 +110,11 @@ public final class Mp4MediaChunk extends MediaChunk {
prepared = true;
}
if (prepared) {
mediaFormat = Assertions.checkNotNull(extractor.getFormat());
psshInfo = extractor.getPsshInfo();
mediaFormat = extractor.getFormat();
Map<UUID, byte[]> extractorPsshInfo = extractor.getPsshInfo();
if (extractorPsshInfo != null) {
psshInfo = extractorPsshInfo;
}
}
}
return prepared;

View File

@ -46,6 +46,10 @@ public class MultiTrackChunkSource implements ChunkSource, ExoPlayerComponent {
this.selectedSource = sources[0];
}
/**
 * @param sources A list of the sources that this instance can switch between. Must not be empty.
 */
public MultiTrackChunkSource(List<ChunkSource> sources) {
this(toChunkSourceArray(sources));
}
/**
* Gets the number of tracks that this source can switch between. May be called safely from any
* thread.
@ -107,4 +111,10 @@ public class MultiTrackChunkSource implements ChunkSource, ExoPlayerComponent {
selectedSource.onChunkLoadError(chunk, e);
}
/**
 * Copies the given list of sources into a newly allocated array.
 *
 * @param sources The sources to copy.
 * @return A new array containing the same sources, in the same order.
 */
private static ChunkSource[] toChunkSourceArray(List<ChunkSource> sources) {
  return sources.toArray(new ChunkSource[sources.size()]);
}
}

View File

@ -0,0 +1,110 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import java.io.IOException;
import java.util.List;
/**
 * A {@link ChunkSource} whose entire stream consists of a single chunk holding a single sample.
 * <p>
 * An example use case for this implementation is to act as the source for loading out-of-band
 * subtitles, where subtitles for the entire video are delivered as a single file.
 */
public class SingleSampleChunkSource implements ChunkSource {

  private final DataSource dataSource;
  private final DataSpec dataSpec;
  private final Format format;
  private final long durationUs;
  private final MediaFormat mediaFormat;
  private final TrackInfo trackInfo;

  /**
   * @param dataSource A {@link DataSource} suitable for loading the sample data.
   * @param dataSpec Defines the location of the sample.
   * @param format The format of the sample.
   * @param durationUs The duration of the sample in microseconds, or {@link C#UNKNOWN_TIME_US} if
   *     the duration is unknown.
   * @param mediaFormat The sample media format. May be null.
   */
  public SingleSampleChunkSource(DataSource dataSource, DataSpec dataSpec, Format format,
      long durationUs, MediaFormat mediaFormat) {
    this.dataSource = dataSource;
    this.dataSpec = dataSpec;
    this.format = format;
    this.durationUs = durationUs;
    this.mediaFormat = mediaFormat;
    this.trackInfo = new TrackInfo(format.mimeType, durationUs);
  }

  @Override
  public TrackInfo getTrackInfo() {
    return trackInfo;
  }

  @Override
  public void getMaxVideoDimensions(MediaFormat out) {
    // Not applicable: this source does not constrain video dimensions.
  }

  @Override
  public void enable() {
    // No-op: there is no state to set up.
  }

  @Override
  public void continueBuffering(long playbackPositionUs) {
    // No-op: there is nothing to buffer beyond the single chunk.
  }

  @Override
  public void getChunkOperation(List<? extends MediaChunk> queue, long seekPositionUs,
      long playbackPositionUs, ChunkOperationHolder out) {
    if (queue.isEmpty()) {
      // The single sample has not been provided yet. nextChunkIndex is -1 because there is no
      // chunk after this one.
      out.chunk = new SingleSampleMediaChunk(dataSource, dataSpec, format, 0, 0, durationUs, -1,
          mediaFormat);
    }
  }

  @Override
  public void disable(List<? extends MediaChunk> queue) {
    // No-op: there is no state to tear down.
  }

  @Override
  public IOException getError() {
    // This source never enters an error state.
    return null;
  }

  @Override
  public void onChunkLoadError(Chunk chunk, Exception e) {
    // No-op: error handling is left to the caller.
  }

}

View File

@ -22,7 +22,6 @@ import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.Assertions;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.UUID;
@ -64,10 +63,8 @@ public class SingleSampleMediaChunk extends MediaChunk {
* @param nextChunkIndex The index of the next chunk, or -1 if this is the last chunk.
* @param sampleFormat The format of the media contained by the chunk.
* @param headerData Custom header data for the sample. May be null. If set, the header data is
* prepended to the sample data returned when {@link #read(SampleHolder)} is called. It is
* however not considered part of the loaded data, and so is not prepended to the data
* returned by {@link #getLoadedData()}. It is also not reflected in the values returned by
* {@link #bytesLoaded()} and {@link #getLength()}.
* prepended to the sample data returned when {@link #read(SampleHolder)} is called. It is not
* reflected in the values returned by {@link #bytesLoaded()} and {@link #getLength()}.
*/
public SingleSampleMediaChunk(DataSource dataSource, DataSpec dataSpec, Format format,
int trigger, long startTimeUs, long endTimeUs, int nextChunkIndex, MediaFormat sampleFormat,
@ -99,9 +96,8 @@ public class SingleSampleMediaChunk extends MediaChunk {
if (headerData != null) {
sampleSize += headerData.length;
}
if (holder.allowDataBufferReplacement &&
(holder.data == null || holder.data.capacity() < sampleSize)) {
holder.data = ByteBuffer.allocate(sampleSize);
if (holder.data == null || holder.data.capacity() < sampleSize) {
holder.replaceBuffer(sampleSize);
}
int bytesRead;
if (holder.data != null) {

View File

@ -15,9 +15,11 @@
*/
package com.google.android.exoplayer.dash;
import com.google.android.exoplayer.BehindLiveWindowException;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.Chunk;
import com.google.android.exoplayer.chunk.ChunkOperationHolder;
import com.google.android.exoplayer.chunk.ChunkSource;
@ -27,74 +29,180 @@ import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.Evaluation;
import com.google.android.exoplayer.chunk.MediaChunk;
import com.google.android.exoplayer.chunk.Mp4MediaChunk;
import com.google.android.exoplayer.chunk.SingleSampleMediaChunk;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.ContentProtection;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
import com.google.android.exoplayer.dash.mpd.Period;
import com.google.android.exoplayer.dash.mpd.RangedUri;
import com.google.android.exoplayer.dash.mpd.Representation;
import com.google.android.exoplayer.parser.Extractor;
import com.google.android.exoplayer.parser.mp4.FragmentedMp4Extractor;
import com.google.android.exoplayer.parser.webm.WebmExtractor;
import com.google.android.exoplayer.text.webvtt.WebvttParser;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.MimeTypes;
import android.net.Uri;
import android.os.SystemClock;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* An {@link ChunkSource} for DASH streams.
* <p>
* This implementation currently supports fMP4 and webm.
* This implementation currently supports fMP4, webm, and webvtt.
*/
public class DashChunkSource implements ChunkSource {
/**
* Thrown when an AdaptationSet is missing from the MPD.
*/
public static class NoAdaptationSetException extends IOException {
public NoAdaptationSetException(String message) {
super(message);
}
}
/**
* Specifies that we should process all tracks.
*/
public static final int USE_ALL_TRACKS = -1;
private final TrackInfo trackInfo;
private final DataSource dataSource;
private final FormatEvaluator evaluator;
private final Evaluation evaluation;
private final StringBuilder headerBuilder;
private final long liveEdgeLatencyUs;
private final int maxWidth;
private final int maxHeight;
private final Format[] formats;
private final HashMap<String, Representation> representations;
private final HashMap<String, Extractor> extractors;
private final HashMap<String, DashSegmentIndex> segmentIndexes;
private final HashMap<String, RepresentationHolder> representationHolders;
private final ManifestFetcher<MediaPresentationDescription> manifestFetcher;
private final int adaptationSetIndex;
private final int[] representationIndices;
private final Map<UUID, byte[]> psshInfo;
private MediaPresentationDescription currentManifest;
private boolean finishedCurrentManifest;
private boolean lastChunkWasInitialization;
private IOException fatalError;
/**
* Lightweight constructor to use for fixed duration content.
*
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param evaluator Selects from the available formats.
* @param formatEvaluator Selects from the available formats.
* @param representations The representations to be considered by the source.
*/
public DashChunkSource(DataSource dataSource, FormatEvaluator evaluator,
public DashChunkSource(DataSource dataSource, FormatEvaluator formatEvaluator,
Representation... representations) {
this(buildManifest(Arrays.asList(representations)), 0, null, dataSource, formatEvaluator);
}
/**
* Lightweight constructor to use for fixed duration content.
*
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats.
* @param representations The representations to be considered by the source.
*/
public DashChunkSource(DataSource dataSource, FormatEvaluator formatEvaluator,
List<Representation> representations) {
this(buildManifest(representations), 0, null, dataSource, formatEvaluator);
}
/**
* Constructor to use for fixed duration content.
*
* @param manifest The manifest.
* @param adaptationSetIndex The index of the adaptation set that should be used.
* @param representationIndices The indices of the representations within the adaptations set
* that should be used. May be null if all representations within the adaptation set should
* be considered.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats.
*/
public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetIndex,
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
this(null, manifest, adaptationSetIndex, representationIndices, dataSource, formatEvaluator, 0);
}
/**
* Constructor to use for live streaming.
* <p>
* May also be used for fixed duration content, in which case the call is equivalent to calling
* the other constructor, passing {@code manifestFetcher.getManifest()} as the first argument.
*
* @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load.
* @param adaptationSetIndex The index of the adaptation set that should be used.
* @param representationIndices The indices of the representations within the adaptations set
* that should be used. May be null if all representations within the adaptation set should
* be considered.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
* lag behind the "live edge" (i.e. the end of the most recently defined media in the
* manifest). Choosing a small value will minimize latency introduced by the player, however
* note that the value sets an upper bound on the length of media that the player can buffer.
* Hence a small value may increase the probability of rebuffering and playback failures.
*/
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) {
this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
dataSource, formatEvaluator, liveEdgeLatencyMs * 1000);
}
private DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
MediaPresentationDescription initialManifest, int adaptationSetIndex,
int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator,
long liveEdgeLatencyUs) {
this.manifestFetcher = manifestFetcher;
this.currentManifest = initialManifest;
this.adaptationSetIndex = adaptationSetIndex;
this.representationIndices = representationIndices;
this.dataSource = dataSource;
this.evaluator = evaluator;
this.formats = new Format[representations.length];
this.extractors = new HashMap<String, Extractor>();
this.segmentIndexes = new HashMap<String, DashSegmentIndex>();
this.representations = new HashMap<String, Representation>();
this.trackInfo = new TrackInfo(representations[0].format.mimeType,
representations[0].periodDurationMs * 1000);
this.evaluator = formatEvaluator;
this.liveEdgeLatencyUs = liveEdgeLatencyUs;
this.evaluation = new Evaluation();
this.headerBuilder = new StringBuilder();
psshInfo = getPsshInfo(currentManifest, adaptationSetIndex);
Representation[] representations = getFilteredRepresentations(currentManifest,
adaptationSetIndex, representationIndices);
long periodDurationUs = (representations[0].periodDurationMs == TrackRenderer.UNKNOWN_TIME_US)
? TrackRenderer.UNKNOWN_TIME_US : representations[0].periodDurationMs * 1000;
this.trackInfo = new TrackInfo(representations[0].format.mimeType, periodDurationUs);
this.formats = new Format[representations.length];
this.representationHolders = new HashMap<String, RepresentationHolder>();
int maxWidth = 0;
int maxHeight = 0;
for (int i = 0; i < representations.length; i++) {
formats[i] = representations[i].format;
maxWidth = Math.max(formats[i].width, maxWidth);
maxHeight = Math.max(formats[i].height, maxHeight);
Extractor extractor = formats[i].mimeType.startsWith(MimeTypes.VIDEO_WEBM)
? new WebmExtractor() : new FragmentedMp4Extractor();
extractors.put(formats[i].id, extractor);
this.representations.put(formats[i].id, representations[i]);
DashSegmentIndex segmentIndex = representations[i].getIndex();
if (segmentIndex != null) {
segmentIndexes.put(formats[i].id, segmentIndex);
}
Extractor extractor = mimeTypeIsWebm(formats[i].mimeType) ? new WebmExtractor()
: new FragmentedMp4Extractor();
representationHolders.put(formats[i].id,
new RepresentationHolder(representations[i], extractor));
}
this.maxWidth = maxWidth;
this.maxHeight = maxHeight;
@ -116,21 +224,67 @@ public class DashChunkSource implements ChunkSource {
@Override
public void enable() {
evaluator.enable();
if (manifestFetcher != null) {
manifestFetcher.enable();
}
}
@Override
public void disable(List<? extends MediaChunk> queue) {
evaluator.disable();
if (manifestFetcher != null) {
manifestFetcher.disable();
}
}
@Override
public void continueBuffering(long playbackPositionUs) {
// Do nothing
if (manifestFetcher == null || !currentManifest.dynamic || fatalError != null) {
return;
}
MediaPresentationDescription newManifest = manifestFetcher.getManifest();
if (currentManifest != newManifest && newManifest != null) {
Representation[] newRepresentations = DashChunkSource.getFilteredRepresentations(newManifest,
adaptationSetIndex, representationIndices);
for (Representation representation : newRepresentations) {
RepresentationHolder representationHolder =
representationHolders.get(representation.format.id);
DashSegmentIndex oldIndex = representationHolder.segmentIndex;
DashSegmentIndex newIndex = representation.getIndex();
int newFirstSegmentNum = newIndex.getFirstSegmentNum();
int segmentNumShift = oldIndex.getSegmentNum(newIndex.getTimeUs(newFirstSegmentNum))
- newFirstSegmentNum;
representationHolder.segmentNumShift += segmentNumShift;
representationHolder.segmentIndex = newIndex;
}
currentManifest = newManifest;
finishedCurrentManifest = false;
}
// TODO: This is a temporary hack to avoid constantly refreshing the MPD in cases where
// minUpdatePeriod is set to 0. In such cases we shouldn't refresh unless there is explicit
// signaling in the stream, according to:
// http://azure.microsoft.com/blog/2014/09/13/dash-live-streaming-with-azure-media-service/
long minUpdatePeriod = currentManifest.minUpdatePeriod;
if (minUpdatePeriod == 0) {
minUpdatePeriod = 5000;
}
if (finishedCurrentManifest && (SystemClock.elapsedRealtime()
> manifestFetcher.getManifestLoadTimestamp() + minUpdatePeriod)) {
manifestFetcher.requestRefresh();
}
}
@Override
public final void getChunkOperation(List<? extends MediaChunk> queue, long seekPositionUs,
long playbackPositionUs, ChunkOperationHolder out) {
if (fatalError != null) {
out.chunk = null;
return;
}
evaluation.queueSize = queue.size();
if (evaluation.format == null || !lastChunkWasInitialization) {
evaluator.evaluate(queue, playbackPositionUs, formats, evaluation);
@ -148,17 +302,21 @@ public class DashChunkSource implements ChunkSource {
return;
}
Representation selectedRepresentation = representations.get(selectedFormat.id);
Extractor extractor = extractors.get(selectedRepresentation.format.id);
RepresentationHolder representationHolder = representationHolders.get(selectedFormat.id);
Representation selectedRepresentation = representationHolder.representation;
DashSegmentIndex segmentIndex = representationHolder.segmentIndex;
Extractor extractor = representationHolder.extractor;
RangedUri pendingInitializationUri = null;
RangedUri pendingIndexUri = null;
if (extractor.getFormat() == null) {
pendingInitializationUri = selectedRepresentation.getInitializationUri();
}
if (!segmentIndexes.containsKey(selectedRepresentation.format.id)) {
if (segmentIndex == null) {
pendingIndexUri = selectedRepresentation.getIndexUri();
}
if (pendingInitializationUri != null || pendingIndexUri != null) {
// We have initialization and/or index requests to make.
Chunk initializationChunk = newInitializationChunk(pendingInitializationUri, pendingIndexUri,
@ -168,28 +326,48 @@ public class DashChunkSource implements ChunkSource {
return;
}
int nextSegmentNum;
DashSegmentIndex segmentIndex = segmentIndexes.get(selectedRepresentation.format.id);
int segmentNum;
if (queue.isEmpty()) {
nextSegmentNum = segmentIndex.getSegmentNum(seekPositionUs);
if (currentManifest.dynamic) {
seekPositionUs = getLiveSeekPosition();
}
segmentNum = segmentIndex.getSegmentNum(seekPositionUs);
} else {
nextSegmentNum = queue.get(out.queueSize - 1).nextChunkIndex;
segmentNum = queue.get(out.queueSize - 1).nextChunkIndex
- representationHolder.segmentNumShift;
}
if (nextSegmentNum == -1) {
if (currentManifest.dynamic) {
if (segmentNum < segmentIndex.getFirstSegmentNum()) {
// This is before the first chunk in the current manifest.
fatalError = new BehindLiveWindowException();
return;
} else if (segmentNum > segmentIndex.getLastSegmentNum()) {
// This is beyond the last chunk in the current manifest.
finishedCurrentManifest = true;
return;
} else if (segmentNum == segmentIndex.getLastSegmentNum()) {
// This is the last chunk in the current manifest. Mark the manifest as being finished,
// but continue to return the final chunk.
finishedCurrentManifest = true;
}
}
if (segmentNum == -1) {
out.chunk = null;
return;
}
Chunk nextMediaChunk = newMediaChunk(selectedRepresentation, segmentIndex, extractor,
dataSource, nextSegmentNum, evaluation.trigger);
Chunk nextMediaChunk = newMediaChunk(representationHolder, dataSource, segmentNum,
evaluation.trigger);
lastChunkWasInitialization = false;
out.chunk = nextMediaChunk;
}
@Override
public IOException getError() {
return null;
return fatalError != null ? fatalError
: (manifestFetcher != null ? manifestFetcher.getError() : null);
}
@Override
@ -197,6 +375,10 @@ public class DashChunkSource implements ChunkSource {
// Do nothing.
}
/**
 * Whether the given mime type identifies webm content (video or audio).
 *
 * @param mimeType The mime type to check.
 * @return True if the mime type starts with a webm video or audio prefix. False otherwise.
 */
private boolean mimeTypeIsWebm(String mimeType) {
return mimeType.startsWith(MimeTypes.VIDEO_WEBM) || mimeType.startsWith(MimeTypes.AUDIO_WEBM);
}
private Chunk newInitializationChunk(RangedUri initializationUri, RangedUri indexUri,
Representation representation, Extractor extractor, DataSource dataSource,
int trigger) {
@ -225,22 +407,106 @@ public class DashChunkSource implements ChunkSource {
}
DataSpec dataSpec = new DataSpec(requestUri.getUri(), requestUri.start, requestUri.length,
representation.getCacheKey());
return new InitializationLoadable(dataSource, dataSpec, trigger, representation.format,
extractor, expectedExtractorResult, indexAnchor);
}
private Chunk newMediaChunk(Representation representation, DashSegmentIndex segmentIndex,
Extractor extractor, DataSource dataSource, int segmentNum, int trigger) {
int lastSegmentNum = segmentIndex.getLastSegmentNum();
int nextSegmentNum = segmentNum == lastSegmentNum ? -1 : segmentNum + 1;
private Chunk newMediaChunk(RepresentationHolder representationHolder, DataSource dataSource,
int segmentNum, int trigger) {
Representation representation = representationHolder.representation;
DashSegmentIndex segmentIndex = representationHolder.segmentIndex;
long startTimeUs = segmentIndex.getTimeUs(segmentNum);
long endTimeUs = segmentNum < lastSegmentNum ? segmentIndex.getTimeUs(segmentNum + 1)
: startTimeUs + segmentIndex.getDurationUs(segmentNum);
long endTimeUs = startTimeUs + segmentIndex.getDurationUs(segmentNum);
boolean isLastSegment = !currentManifest.dynamic
&& segmentNum == segmentIndex.getLastSegmentNum();
int nextAbsoluteSegmentNum = isLastSegment ? -1
: (representationHolder.segmentNumShift + segmentNum + 1);
RangedUri segmentUri = segmentIndex.getSegmentUrl(segmentNum);
DataSpec dataSpec = new DataSpec(segmentUri.getUri(), segmentUri.start, segmentUri.length,
representation.getCacheKey());
return new Mp4MediaChunk(dataSource, dataSpec, representation.format, trigger, startTimeUs,
endTimeUs, nextSegmentNum, extractor, false, 0);
long presentationTimeOffsetUs = representation.presentationTimeOffsetMs * 1000;
if (representation.format.mimeType.equals(MimeTypes.TEXT_VTT)) {
if (representationHolder.vttHeaderOffsetUs != presentationTimeOffsetUs) {
// Update the VTT header.
headerBuilder.setLength(0);
headerBuilder.append(WebvttParser.EXO_HEADER).append("=")
.append(WebvttParser.OFFSET).append(presentationTimeOffsetUs).append("\n");
representationHolder.vttHeader = headerBuilder.toString().getBytes();
representationHolder.vttHeaderOffsetUs = presentationTimeOffsetUs;
}
return new SingleSampleMediaChunk(dataSource, dataSpec, representation.format, 0,
startTimeUs, endTimeUs, nextAbsoluteSegmentNum, null, representationHolder.vttHeader);
} else {
return new Mp4MediaChunk(dataSource, dataSpec, representation.format, trigger, startTimeUs,
endTimeUs, nextAbsoluteSegmentNum, representationHolder.extractor, psshInfo, false,
presentationTimeOffsetUs);
}
}
/**
 * For live playbacks, determines the seek position that snaps playback to be
 * {@link #liveEdgeLatencyUs} behind the live edge of the current manifest.
 *
 * @return The seek position in microseconds.
 */
private long getLiveSeekPosition() {
  // Find the latest timestamp for which media is defined across all representations.
  long latestEdgeUs = Long.MIN_VALUE;
  for (RepresentationHolder holder : representationHolders.values()) {
    DashSegmentIndex index = holder.segmentIndex;
    int lastSegmentNum = index.getLastSegmentNum();
    long representationEdgeUs =
        index.getTimeUs(lastSegmentNum) + index.getDurationUs(lastSegmentNum);
    if (representationEdgeUs > latestEdgeUs) {
      latestEdgeUs = representationEdgeUs;
    }
  }
  // Back off from the live edge by the configured latency.
  return latestEdgeUs - liveEdgeLatencyUs;
}
/**
 * Selects the representations to use from the specified adaptation set of the manifest.
 *
 * @param manifest The manifest.
 * @param adaptationSetIndex The index of the adaptation set in the manifest's first period.
 * @param representationIndices The indices of the representations to select, or null to select
 *     all representations in the adaptation set.
 * @return The selected representations, in selection order.
 */
private static Representation[] getFilteredRepresentations(MediaPresentationDescription manifest,
    int adaptationSetIndex, int[] representationIndices) {
  List<Representation> all =
      manifest.periods.get(0).adaptationSets.get(adaptationSetIndex).representations;
  if (representationIndices == null) {
    // No filtering requested; copy every representation in the adaptation set.
    return all.toArray(new Representation[all.size()]);
  }
  Representation[] filtered = new Representation[representationIndices.length];
  for (int i = 0; i < representationIndices.length; i++) {
    filtered[i] = all.get(representationIndices[i]);
  }
  return filtered;
}
/**
 * Builds a map of pssh data from the content protection elements of the specified adaptation
 * set.
 *
 * @param manifest The manifest.
 * @param adaptationSetIndex The index of the adaptation set in the manifest's first period.
 * @return A map from scheme UUID to pssh data, or null if no usable content protection entries
 *     (i.e. entries with both a UUID and data) are present.
 */
private static Map<UUID, byte[]> getPsshInfo(MediaPresentationDescription manifest,
    int adaptationSetIndex) {
  AdaptationSet adaptationSet = manifest.periods.get(0).adaptationSets.get(adaptationSetIndex);
  if (adaptationSet.contentProtections.isEmpty()) {
    return null;
  }
  Map<UUID, byte[]> schemePsshData = new HashMap<UUID, byte[]>();
  for (ContentProtection protection : adaptationSet.contentProtections) {
    // Entries missing either a UUID or data cannot be used.
    if (protection.uuid != null && protection.data != null) {
      schemePsshData.put(protection.uuid, protection.data);
    }
  }
  return schemePsshData.isEmpty() ? null : schemePsshData;
}
/**
 * Wraps a list of representations in a single-period manifest, for use by the fixed duration
 * constructors.
 *
 * @param representations The representations. Must not be empty.
 * @return A manifest containing one period with one adaptation set holding the representations.
 */
private static MediaPresentationDescription buildManifest(List<Representation> representations) {
  Representation first = representations.get(0);
  AdaptationSet adaptationSet =
      new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
  Period period = new Period(null, first.periodStartMs, first.periodDurationMs,
      Collections.singletonList(adaptationSet));
  // The manifest duration is taken from the period of the first representation.
  long durationMs = first.periodDurationMs - first.periodStartMs;
  return new MediaPresentationDescription(-1, durationMs, -1, false, -1, -1, null,
      Collections.singletonList(period));
}
private class InitializationLoadable extends Chunk {
@ -268,11 +534,30 @@ public class DashChunkSource implements ChunkSource {
+ expectedExtractorResult + ", got " + result);
}
if ((result & Extractor.RESULT_READ_INDEX) != 0) {
segmentIndexes.put(format.id,
new DashWrappingSegmentIndex(extractor.getIndex(), uri, indexAnchor));
representationHolders.get(format.id).segmentIndex =
new DashWrappingSegmentIndex(extractor.getIndex(), uri, indexAnchor);
}
}
}
/**
 * Holds a {@link Representation} together with the mutable state that must survive manifest
 * refreshes: its extractor, its current segment index, and VTT header data.
 */
private static class RepresentationHolder {

public final Representation representation;
// Extractor reused across all chunks of this representation.
public final Extractor extractor;

// The current segment index. Replaced when a refreshed manifest supplies a new index.
public DashSegmentIndex segmentIndex;
// Cumulative shift applied to convert index-relative segment numbers to absolute chunk
// indices across manifest refreshes — presumably accumulated each refresh; see
// continueBuffering. TODO confirm.
public int segmentNumShift;

// Presentation time offset for which vttHeader was last generated, in microseconds.
public long vttHeaderOffsetUs;
// Cached webvtt header bytes. May be null until first generated.
public byte[] vttHeader;

public RepresentationHolder(Representation representation, Extractor extractor) {
this.representation = representation;
this.extractor = extractor;
this.segmentIndex = representation.getIndex();
}

}
}

View File

@ -21,7 +21,7 @@ import java.util.List;
/**
* Represents a set of interchangeable encoded versions of a media content component.
*/
public final class AdaptationSet {
public class AdaptationSet {
public static final int TYPE_UNKNOWN = -1;
public static final int TYPE_VIDEO = 0;

View File

@ -15,36 +15,37 @@
*/
package com.google.android.exoplayer.dash.mpd;
import java.util.Collections;
import java.util.Map;
import java.util.UUID;
/**
* Represents a ContentProtection tag in an AdaptationSet. Holds arbitrary data for various DRM
* schemes.
* Represents a ContentProtection tag in an AdaptationSet.
*/
public final class ContentProtection {
public class ContentProtection {
/**
* Identifies the content protection scheme.
*/
public final String schemeUriId;
/**
* Protection scheme specific data.
* The UUID of the protection scheme. May be null.
*/
public final Map<String, String> keyedData;
public final UUID uuid;
/**
* Protection scheme specific data. May be null.
*/
public final byte[] data;
/**
* @param schemeUriId Identifies the content protection scheme.
* @param keyedData Data specific to the scheme.
* @param uuid The UUID of the protection scheme, if known. May be null.
* @param data Protection scheme specific initialization data. May be null.
*/
public ContentProtection(String schemeUriId, Map<String, String> keyedData) {
public ContentProtection(String schemeUriId, UUID uuid, byte[] data) {
this.schemeUriId = schemeUriId;
if (keyedData != null) {
this.keyedData = Collections.unmodifiableMap(keyedData);
} else {
this.keyedData = Collections.emptyMap();
}
this.uuid = uuid;
this.data = data;
}
}

View File

@ -21,7 +21,7 @@ import java.util.List;
/**
* Represents a DASH media presentation description (mpd).
*/
public final class MediaPresentationDescription {
public class MediaPresentationDescription {
public final long availabilityStartTime;

View File

@ -1,64 +0,0 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.dash.mpd;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.util.ManifestFetcher;
import android.net.Uri;
import java.io.IOException;
import java.io.InputStream;
/**
* A concrete implementation of {@link ManifestFetcher} for loading DASH manifests.
* <p>
* This class is provided for convenience, however it is expected that most applications will
* contain their own mechanisms for making asynchronous network requests and parsing the response.
* In such cases it is recommended that application developers use their existing solution rather
* than this one.
*/
public final class MediaPresentationDescriptionFetcher extends
    ManifestFetcher<MediaPresentationDescription> {

  // Parser used to convert the fetched stream into a manifest object.
  private final MediaPresentationDescriptionParser parser;

  /**
   * @param callback The callback to provide with the parsed manifest (or error).
   */
  public MediaPresentationDescriptionFetcher(
      ManifestCallback<MediaPresentationDescription> callback) {
    super(callback);
    parser = new MediaPresentationDescriptionParser();
  }

  /**
   * @param callback The callback to provide with the parsed manifest (or error).
   * @param timeoutMillis The timeout in milliseconds for the connection used to load the data.
   */
  public MediaPresentationDescriptionFetcher(
      ManifestCallback<MediaPresentationDescription> callback, int timeoutMillis) {
    super(callback, timeoutMillis);
    parser = new MediaPresentationDescriptionParser();
  }

  // Delegates parsing of the fetched bytes to MediaPresentationDescriptionParser.
  @Override
  protected MediaPresentationDescription parse(InputStream stream, String inputEncoding,
      String contentId, Uri baseUrl) throws IOException, ParserException {
    return parser.parseMediaPresentationDescription(stream, inputEncoding, contentId, baseUrl);
  }

}

View File

@ -22,7 +22,9 @@ import com.google.android.exoplayer.dash.mpd.SegmentBase.SegmentTemplate;
import com.google.android.exoplayer.dash.mpd.SegmentBase.SegmentTimelineElement;
import com.google.android.exoplayer.dash.mpd.SegmentBase.SingleSegmentBase;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.ManifestParser;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import android.net.Uri;
import android.text.TextUtils;
@ -34,29 +36,15 @@ import org.xmlpull.v1.XmlPullParserFactory;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* A parser of media presentation description files.
*/
public class MediaPresentationDescriptionParser extends DefaultHandler {
// Note: Does not support the date part of ISO 8601
private static final Pattern DURATION =
Pattern.compile("^PT(([0-9]*)H)?(([0-9]*)M)?(([0-9.]*)S)?$");
private static final Pattern DATE_TIME_PATTERN =
Pattern.compile("(\\d\\d\\d\\d)\\-(\\d\\d)\\-(\\d\\d)[Tt]"
+ "(\\d\\d):(\\d\\d):(\\d\\d)(\\.(\\d+))?"
+ "([Zz]|((\\+|\\-)(\\d\\d):(\\d\\d)))?");
public class MediaPresentationDescriptionParser extends DefaultHandler
implements ManifestParser<MediaPresentationDescription> {
private final XmlPullParserFactory xmlParserFactory;
@ -70,19 +58,9 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
// MPD parsing.
/**
* Parses a manifest from the provided {@link InputStream}.
*
* @param inputStream The stream from which to parse the manifest.
* @param inputEncoding The encoding of the input.
* @param contentId The content id of the media.
* @param baseUrl The url that any relative urls defined within the manifest are relative to.
* @return The parsed manifest.
* @throws IOException If a problem occurred reading from the stream.
* @throws ParserException If a problem occurred parsing the xml as a DASH mpd.
*/
public MediaPresentationDescription parseMediaPresentationDescription(InputStream inputStream,
String inputEncoding, String contentId, Uri baseUrl) throws IOException, ParserException {
@Override
public MediaPresentationDescription parse(InputStream inputStream, String inputEncoding,
String contentId, Uri baseUrl) throws IOException, ParserException {
try {
XmlPullParser xpp = xmlParserFactory.newPullParser();
xpp.setInput(inputStream, inputEncoding);
@ -99,15 +77,16 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
}
}
private MediaPresentationDescription parseMediaPresentationDescription(XmlPullParser xpp,
protected MediaPresentationDescription parseMediaPresentationDescription(XmlPullParser xpp,
String contentId, Uri baseUrl) throws XmlPullParserException, IOException, ParseException {
long availabilityStartTime = parseDateTime(xpp, "availabilityStartTime", -1);
long durationMs = parseDurationMs(xpp, "mediaPresentationDuration");
long minBufferTimeMs = parseDurationMs(xpp, "minBufferTime");
long durationMs = parseDuration(xpp, "mediaPresentationDuration", -1);
long minBufferTimeMs = parseDuration(xpp, "minBufferTime", -1);
String typeString = xpp.getAttributeValue(null, "type");
boolean dynamic = (typeString != null) ? typeString.equals("dynamic") : false;
long minUpdateTimeMs = (dynamic) ? parseDurationMs(xpp, "minimumUpdatePeriod", -1) : -1;
long timeShiftBufferDepthMs = (dynamic) ? parseDurationMs(xpp, "timeShiftBufferDepth", -1) : -1;
long minUpdateTimeMs = (dynamic) ? parseDuration(xpp, "minimumUpdatePeriod", -1) : -1;
long timeShiftBufferDepthMs = (dynamic) ? parseDuration(xpp, "timeShiftBufferDepth", -1)
: -1;
UtcTimingElement utcTiming = null;
List<Period> periods = new ArrayList<Period>();
@ -122,21 +101,33 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
}
} while (!isEndTag(xpp, "MPD"));
return buildMediaPresentationDescription(availabilityStartTime, durationMs, minBufferTimeMs,
dynamic, minUpdateTimeMs, timeShiftBufferDepthMs, utcTiming, periods);
}
/**
 * Factory hook for creating the parsed manifest. Subclasses may override to return a custom
 * {@link MediaPresentationDescription} subclass.
 */
protected MediaPresentationDescription buildMediaPresentationDescription(
    long availabilityStartTime, long durationMs, long minBufferTimeMs, boolean dynamic,
    long minUpdateTimeMs, long timeShiftBufferDepthMs, UtcTimingElement utcTiming,
    List<Period> periods) {
  return new MediaPresentationDescription(availabilityStartTime, durationMs, minBufferTimeMs,
      dynamic, minUpdateTimeMs, timeShiftBufferDepthMs, utcTiming, periods);
}
private UtcTimingElement parseUtcTiming(XmlPullParser xpp) {
/**
 * Parses a UTCTiming element, reading its schemeIdUri and value attributes.
 */
protected UtcTimingElement parseUtcTiming(XmlPullParser xpp) {
  String schemeIdUri = xpp.getAttributeValue(null, "schemeIdUri");
  String value = xpp.getAttributeValue(null, "value");
  return buildUtcTimingElement(schemeIdUri, value);
}
// Factory hook; subclasses may override to return a custom UtcTimingElement subclass.
protected UtcTimingElement buildUtcTimingElement(String schemeIdUri, String value) {
  return new UtcTimingElement(schemeIdUri, value);
}
private Period parsePeriod(XmlPullParser xpp, String contentId, Uri baseUrl, long mpdDurationMs)
protected Period parsePeriod(XmlPullParser xpp, String contentId, Uri baseUrl, long mpdDurationMs)
throws XmlPullParserException, IOException {
String id = xpp.getAttributeValue(null, "id");
long startMs = parseDurationMs(xpp, "start", 0);
long durationMs = parseDurationMs(xpp, "duration", mpdDurationMs);
long startMs = parseDuration(xpp, "start", 0);
long durationMs = parseDuration(xpp, "duration", mpdDurationMs);
SegmentBase segmentBase = null;
List<AdaptationSet> adaptationSets = new ArrayList<AdaptationSet>();
do {
@ -155,12 +146,17 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
}
} while (!isEndTag(xpp, "Period"));
return buildPeriod(id, startMs, durationMs, adaptationSets);
}
// Factory hook; subclasses may override to return a custom Period subclass.
protected Period buildPeriod(
    String id, long startMs, long durationMs, List<AdaptationSet> adaptationSets) {
  return new Period(id, startMs, durationMs, adaptationSets);
}
// AdaptationSet parsing.
private AdaptationSet parseAdaptationSet(XmlPullParser xpp, String contentId, Uri baseUrl,
protected AdaptationSet parseAdaptationSet(XmlPullParser xpp, String contentId, Uri baseUrl,
long periodStartMs, long periodDurationMs, SegmentBase segmentBase)
throws XmlPullParserException, IOException {
@ -197,13 +193,20 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
} else if (isStartTag(xpp, "SegmentTemplate")) {
segmentBase = parseSegmentTemplate(xpp, baseUrl, (SegmentTemplate) segmentBase,
periodDurationMs);
} else if (isStartTag(xpp)) {
parseAdaptationSetChild(xpp);
}
} while (!isEndTag(xpp, "AdaptationSet"));
return buildAdaptationSet(id, contentType, representations, contentProtections);
}
// Factory hook; subclasses may override to return a custom AdaptationSet subclass.
protected AdaptationSet buildAdaptationSet(int id, int contentType,
    List<Representation> representations, List<ContentProtection> contentProtections) {
  return new AdaptationSet(id, contentType, representations, contentProtections);
}
private int parseAdaptationSetType(String contentType) {
protected int parseAdaptationSetType(String contentType) {
return TextUtils.isEmpty(contentType) ? AdaptationSet.TYPE_UNKNOWN
: MimeTypes.BASE_TYPE_AUDIO.equals(contentType) ? AdaptationSet.TYPE_AUDIO
: MimeTypes.BASE_TYPE_VIDEO.equals(contentType) ? AdaptationSet.TYPE_VIDEO
@ -211,7 +214,7 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
: AdaptationSet.TYPE_UNKNOWN;
}
private int parseAdaptationSetTypeFromMimeType(String mimeType) {
protected int parseAdaptationSetTypeFromMimeType(String mimeType) {
return TextUtils.isEmpty(mimeType) ? AdaptationSet.TYPE_UNKNOWN
: MimeTypes.isAudio(mimeType) ? AdaptationSet.TYPE_AUDIO
: MimeTypes.isVideo(mimeType) ? AdaptationSet.TYPE_VIDEO
@ -249,13 +252,29 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
**/
protected ContentProtection parseContentProtection(XmlPullParser xpp)
throws XmlPullParserException, IOException {
String schemeUriId = xpp.getAttributeValue(null, "schemeUriId");
return new ContentProtection(schemeUriId, null);
String schemeIdUri = xpp.getAttributeValue(null, "schemeIdUri");
return buildContentProtection(schemeIdUri);
}
/**
 * Factory hook for creating a {@link ContentProtection}. The base implementation supplies no
 * scheme UUID or data; subclasses may override to provide them.
 */
protected ContentProtection buildContentProtection(String schemeIdUri) {
  return new ContentProtection(schemeIdUri, null, null);
}
/**
* Parses children of AdaptationSet elements not specifically parsed elsewhere.
*
* @param xpp The XmpPullParser from which the AdaptationSet child should be parsed.
* @throws XmlPullParserException If an error occurs parsing the element.
* @throws IOException If an error occurs reading the element.
**/
protected void parseAdaptationSetChild(XmlPullParser xpp)
    throws XmlPullParserException, IOException {
  // Intentionally a no-op; subclasses may override to handle additional child elements.
}
// Representation parsing.
private Representation parseRepresentation(XmlPullParser xpp, String contentId, Uri baseUrl,
protected Representation parseRepresentation(XmlPullParser xpp, String contentId, Uri baseUrl,
long periodStartMs, long periodDurationMs, String mimeType, String language,
SegmentBase segmentBase) throws XmlPullParserException, IOException {
String id = xpp.getAttributeValue(null, "id");
@ -282,15 +301,27 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
}
} while (!isEndTag(xpp, "Representation"));
Format format = new Format(id, mimeType, width, height, numChannels, audioSamplingRate,
Format format = buildFormat(id, mimeType, width, height, numChannels, audioSamplingRate,
bandwidth, language);
return Representation.newInstance(periodStartMs, periodDurationMs, contentId, -1, format,
return buildRepresentation(periodStartMs, periodDurationMs, contentId, -1, format,
segmentBase);
}
// Factory hook; subclasses may override to return a custom Format subclass.
protected Format buildFormat(String id, String mimeType, int width, int height, int numChannels,
    int audioSamplingRate, int bandwidth, String language) {
  return new Format(id, mimeType, width, height, numChannels, audioSamplingRate,
      bandwidth, language);
}
// Factory hook; subclasses may override to build a custom Representation.
protected Representation buildRepresentation(long periodStartMs, long periodDurationMs,
    String contentId, int revisionId, Format format, SegmentBase segmentBase) {
  return Representation.newInstance(periodStartMs, periodDurationMs, contentId, revisionId,
      format, segmentBase);
}
// SegmentBase, SegmentList and SegmentTemplate parsing.
private SingleSegmentBase parseSegmentBase(XmlPullParser xpp, Uri baseUrl,
protected SingleSegmentBase parseSegmentBase(XmlPullParser xpp, Uri baseUrl,
SingleSegmentBase parent) throws XmlPullParserException, IOException {
long timescale = parseLong(xpp, "timescale", parent != null ? parent.timescale : 1);
@ -314,18 +345,24 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
}
} while (!isEndTag(xpp, "SegmentBase"));
return buildSingleSegmentBase(initialization, timescale, presentationTimeOffset, baseUrl,
indexStart, indexLength);
}
protected SingleSegmentBase buildSingleSegmentBase(RangedUri initialization, long timescale,
long presentationTimeOffset, Uri baseUrl, long indexStart, long indexLength) {
return new SingleSegmentBase(initialization, timescale, presentationTimeOffset, baseUrl,
indexStart, indexLength);
}
private SegmentList parseSegmentList(XmlPullParser xpp, Uri baseUrl, SegmentList parent,
protected SegmentList parseSegmentList(XmlPullParser xpp, Uri baseUrl, SegmentList parent,
long periodDuration) throws XmlPullParserException, IOException {
long timescale = parseLong(xpp, "timescale", parent != null ? parent.timescale : 1);
long presentationTimeOffset = parseLong(xpp, "presentationTimeOffset",
parent != null ? parent.presentationTimeOffset : 0);
long duration = parseLong(xpp, "duration", parent != null ? parent.duration : -1);
int startNumber = parseInt(xpp, "startNumber", parent != null ? parent.startNumber : 0);
int startNumber = parseInt(xpp, "startNumber", parent != null ? parent.startNumber : 1);
RangedUri initialization = null;
List<SegmentTimelineElement> timeline = null;
@ -351,18 +388,25 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
segments = segments != null ? segments : parent.mediaSegments;
}
return buildSegmentList(initialization, timescale, presentationTimeOffset, periodDuration,
startNumber, duration, timeline, segments);
}
protected SegmentList buildSegmentList(RangedUri initialization, long timescale,
long presentationTimeOffset, long periodDuration, int startNumber, long duration,
List<SegmentTimelineElement> timeline, List<RangedUri> segments) {
return new SegmentList(initialization, timescale, presentationTimeOffset, periodDuration,
startNumber, duration, timeline, segments);
}
private SegmentTemplate parseSegmentTemplate(XmlPullParser xpp, Uri baseUrl,
protected SegmentTemplate parseSegmentTemplate(XmlPullParser xpp, Uri baseUrl,
SegmentTemplate parent, long periodDuration) throws XmlPullParserException, IOException {
long timescale = parseLong(xpp, "timescale", parent != null ? parent.timescale : 1);
long presentationTimeOffset = parseLong(xpp, "presentationTimeOffset",
parent != null ? parent.presentationTimeOffset : 0);
long duration = parseLong(xpp, "duration", parent != null ? parent.duration : -1);
int startNumber = parseInt(xpp, "startNumber", parent != null ? parent.startNumber : 0);
int startNumber = parseInt(xpp, "startNumber", parent != null ? parent.startNumber : 1);
UrlTemplate mediaTemplate = parseUrlTemplate(xpp, "media",
parent != null ? parent.mediaTemplate : null);
UrlTemplate initializationTemplate = parseUrlTemplate(xpp, "initialization",
@ -385,11 +429,19 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
timeline = timeline != null ? timeline : parent.segmentTimeline;
}
return buildSegmentTemplate(initialization, timescale, presentationTimeOffset, periodDuration,
startNumber, duration, timeline, initializationTemplate, mediaTemplate, baseUrl);
}
protected SegmentTemplate buildSegmentTemplate(RangedUri initialization, long timescale,
long presentationTimeOffset, long periodDuration, int startNumber, long duration,
List<SegmentTimelineElement> timeline, UrlTemplate initializationTemplate,
UrlTemplate mediaTemplate, Uri baseUrl) {
return new SegmentTemplate(initialization, timescale, presentationTimeOffset, periodDuration,
startNumber, duration, timeline, initializationTemplate, mediaTemplate, baseUrl);
}
private List<SegmentTimelineElement> parseSegmentTimeline(XmlPullParser xpp)
protected List<SegmentTimelineElement> parseSegmentTimeline(XmlPullParser xpp)
throws XmlPullParserException, IOException {
List<SegmentTimelineElement> segmentTimeline = new ArrayList<SegmentTimelineElement>();
long elapsedTime = 0;
@ -400,7 +452,7 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
long duration = parseLong(xpp, "d");
int count = 1 + parseInt(xpp, "r", 0);
for (int i = 0; i < count; i++) {
segmentTimeline.add(new SegmentTimelineElement(elapsedTime, duration));
segmentTimeline.add(buildSegmentTimelineElement(elapsedTime, duration));
elapsedTime += duration;
}
}
@ -408,7 +460,11 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
return segmentTimeline;
}
private UrlTemplate parseUrlTemplate(XmlPullParser xpp, String name,
protected SegmentTimelineElement buildSegmentTimelineElement(long elapsedTime, long duration) {
return new SegmentTimelineElement(elapsedTime, duration);
}
protected UrlTemplate parseUrlTemplate(XmlPullParser xpp, String name,
UrlTemplate defaultValue) {
String valueString = xpp.getAttributeValue(null, name);
if (valueString != null) {
@ -417,15 +473,15 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
return defaultValue;
}
private RangedUri parseInitialization(XmlPullParser xpp, Uri baseUrl) {
// Parses an Initialization element's sourceURL/range attributes into a RangedUri.
protected RangedUri parseInitialization(XmlPullParser xpp, Uri baseUrl) {
  return parseRangedUrl(xpp, baseUrl, "sourceURL", "range");
}
private RangedUri parseSegmentUrl(XmlPullParser xpp, Uri baseUrl) {
// Parses a SegmentURL element's media/mediaRange attributes into a RangedUri.
protected RangedUri parseSegmentUrl(XmlPullParser xpp, Uri baseUrl) {
  return parseRangedUrl(xpp, baseUrl, "media", "mediaRange");
}
private RangedUri parseRangedUrl(XmlPullParser xpp, Uri baseUrl, String urlAttribute,
protected RangedUri parseRangedUrl(XmlPullParser xpp, Uri baseUrl, String urlAttribute,
String rangeAttribute) {
String urlText = xpp.getAttributeValue(null, urlAttribute);
long rangeStart = 0;
@ -436,6 +492,11 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
rangeStart = Long.parseLong(rangeTextArray[0]);
rangeLength = Long.parseLong(rangeTextArray[1]) - rangeStart + 1;
}
return buildRangedUri(baseUrl, urlText, rangeStart, rangeLength);
}
// Factory hook; subclasses may override to return a custom RangedUri subclass.
protected RangedUri buildRangedUri(Uri baseUrl, String urlText, long rangeStart,
    long rangeLength) {
  return new RangedUri(baseUrl, urlText, rangeStart, rangeLength);
}
@ -450,83 +511,27 @@ public class MediaPresentationDescriptionParser extends DefaultHandler {
return xpp.getEventType() == XmlPullParser.START_TAG && name.equals(xpp.getName());
}
private static long parseDurationMs(XmlPullParser xpp, String name) {
return parseDurationMs(xpp, name, -1);
/**
 * Returns whether the parser is currently positioned on a start tag, regardless of its name.
 */
protected static boolean isStartTag(XmlPullParser xpp) throws XmlPullParserException {
  return xpp.getEventType() == XmlPullParser.START_TAG;
}
private static long parseDateTime(XmlPullParser xpp, String name, long defaultValue)
throws ParseException {
/**
 * Reads the named attribute as an xs:duration and converts it to milliseconds.
 *
 * @param xpp The parser positioned on the element carrying the attribute.
 * @param name The name of the attribute to read.
 * @param defaultValue The value returned when the attribute is absent.
 * @return The parsed duration in milliseconds, or {@code defaultValue} if absent.
 */
protected static long parseDuration(XmlPullParser xpp, String name, long defaultValue) {
  String value = xpp.getAttributeValue(null, name);
  return value == null ? defaultValue : Util.parseXsDuration(value);
}
// VisibleForTesting
static long parseDateTime(String value) throws ParseException {
Matcher matcher = DATE_TIME_PATTERN.matcher(value);
if (!matcher.matches()) {
throw new ParseException("Invalid date/time format: " + value, 0);
}
int timezoneShift;
if (matcher.group(9) == null) {
// No time zone specified.
timezoneShift = 0;
} else if (matcher.group(9).equalsIgnoreCase("Z")) {
timezoneShift = 0;
} else {
timezoneShift = ((Integer.valueOf(matcher.group(12)) * 60
+ Integer.valueOf(matcher.group(13))));
if (matcher.group(11).equals("-")) {
timezoneShift *= -1;
}
}
Calendar dateTime = new GregorianCalendar(TimeZone.getTimeZone("GMT"));
dateTime.clear();
// Note: The month value is 0-based, hence the -1 on group(2)
dateTime.set(Integer.valueOf(matcher.group(1)),
Integer.valueOf(matcher.group(2)) - 1,
Integer.valueOf(matcher.group(3)),
Integer.valueOf(matcher.group(4)),
Integer.valueOf(matcher.group(5)),
Integer.valueOf(matcher.group(6)));
if (!TextUtils.isEmpty(matcher.group(8))) {
final BigDecimal bd = new BigDecimal("0." + matcher.group(8));
// we care only for milliseconds, so movePointRight(3)
dateTime.set(Calendar.MILLISECOND, bd.movePointRight(3).intValue());
}
long time = dateTime.getTimeInMillis();
if (timezoneShift != 0) {
time -= timezoneShift * 60000;
}
return time;
}
private static long parseDurationMs(XmlPullParser xpp, String name, long defaultValue) {
protected static long parseDateTime(XmlPullParser xpp, String name, long defaultValue)
throws ParseException {
String value = xpp.getAttributeValue(null, name);
if (value != null) {
Matcher matcher = DURATION.matcher(value);
if (matcher.matches()) {
String hours = matcher.group(2);
double durationSeconds = (hours != null) ? Double.parseDouble(hours) * 3600 : 0;
String minutes = matcher.group(4);
durationSeconds += (minutes != null) ? Double.parseDouble(minutes) * 60 : 0;
String seconds = matcher.group(6);
durationSeconds += (seconds != null) ? Double.parseDouble(seconds) : 0;
return (long) (durationSeconds * 1000);
} else {
return (long) (Double.parseDouble(value) * 3600 * 1000);
}
if (value == null) {
return defaultValue;
} else {
return Util.parseXsDateTime(value);
}
return defaultValue;
}
protected static Uri parseBaseUrl(XmlPullParser xpp, Uri parentBaseUrl)

View File

@ -21,7 +21,7 @@ import java.util.List;
/**
* Encapsulates media content components over a contiguous period of time.
*/
public final class Period {
public class Period {
/**
* The period identifier, if one exists.
@ -56,4 +56,21 @@ public final class Period {
this.adaptationSets = Collections.unmodifiableList(adaptationSets);
}
/**
 * Finds the first adaptation set whose type matches the one given.
 *
 * @param type The adaptation set type to look for.
 * @return The index of the first matching adaptation set, or -1 if none matches.
 */
public int getAdaptationSetIndex(int type) {
  int index = 0;
  for (AdaptationSet adaptationSet : adaptationSets) {
    if (adaptationSet.type == type) {
      return index;
    }
    index++;
  }
  return -1;
}
}

View File

@ -16,6 +16,7 @@
package com.google.android.exoplayer.dash.mpd;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
import android.net.Uri;
@ -47,15 +48,10 @@ public final class RangedUri {
/**
* Constructs an ranged uri.
* <p>
* The uri is built according to the following rules:
* <ul>
* <li>If {@code baseUri} is null or if {@code stringUri} is absolute, then {@code baseUri} is
* ignored and the url consists solely of {@code stringUri}.
* <li>If {@code stringUri} is null, then the url consists solely of {@code baseUrl}.
* <li>Otherwise, the url consists of the concatenation of {@code baseUri} and {@code stringUri}.
* </ul>
* See {@link Util#getMergedUri(Uri, String)} for a description of how {@code baseUri} and
* {@code stringUri} are merged.
*
* @param baseUri An uri that can form the base of the uri defined by the instance.
* @param baseUri A uri that can form the base of the uri defined by the instance.
* @param stringUri A relative or absolute uri in string form.
* @param start The (zero based) index of the first byte of the range.
* @param length The length of the range, or -1 to indicate that the range is unbounded.
@ -74,14 +70,7 @@ public final class RangedUri {
* @return The {@link Uri} represented by the instance.
*/
public Uri getUri() {
if (stringUri == null) {
return baseUri;
}
Uri uri = Uri.parse(stringUri);
if (!uri.isAbsolute() && baseUri != null) {
uri = Uri.withAppendedPath(baseUri, stringUri);
}
return uri;
return Util.getMergedUri(baseUri, stringUri);
}
/**

View File

@ -15,6 +15,9 @@
*/
package com.google.android.exoplayer.dash.mpd;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.util.Util;
import android.net.Uri;
import java.util.List;
@ -139,11 +142,12 @@ public abstract class SegmentBase {
public final long getSegmentDurationUs(int sequenceNumber) {
if (segmentTimeline != null) {
return (segmentTimeline.get(sequenceNumber - startNumber).duration * 1000000) / timescale;
long duration = segmentTimeline.get(sequenceNumber - startNumber).duration;
return (duration * C.MICROS_PER_SECOND) / timescale;
} else {
return sequenceNumber == getLastSegmentNum()
? (periodDurationMs * 1000) - getSegmentTimeUs(sequenceNumber)
: ((duration * 1000000L) / timescale);
? ((periodDurationMs * 1000) - getSegmentTimeUs(sequenceNumber))
: ((duration * C.MICROS_PER_SECOND) / timescale);
}
}
@ -155,7 +159,7 @@ public abstract class SegmentBase {
} else {
unscaledSegmentTime = (sequenceNumber - startNumber) * duration;
}
return (unscaledSegmentTime * 1000000) / timescale;
return Util.scaleLargeTimestamp(unscaledSegmentTime, C.MICROS_PER_SECOND, timescale);
}
public abstract RangedUri getSegmentUrl(Representation representation, int index);

View File

@ -30,6 +30,7 @@ import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
@ -61,6 +62,7 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
private final Handler eventHandler;
private final EventListener eventListener;
private final MediaDrm mediaDrm;
private final HashMap<String, String> optionalKeyRequestParameters;
/* package */ final MediaDrmHandler mediaDrmHandler;
/* package */ final MediaDrmCallback callback;
@ -71,6 +73,7 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
private Handler postRequestHandler;
private int openCount;
private boolean provisioningInProgress;
private int state;
private MediaCrypto mediaCrypto;
private Exception lastException;
@ -78,20 +81,33 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
private byte[] schemePsshData;
private byte[] sessionId;
/**
* @deprecated Use the other constructor, passing null as {@code optionalKeyRequestParameters}.
*/
@Deprecated
public StreamingDrmSessionManager(UUID uuid, Looper playbackLooper, MediaDrmCallback callback,
Handler eventHandler, EventListener eventListener) throws UnsupportedSchemeException {
this(uuid, playbackLooper, callback, null, eventHandler, eventListener);
}
/**
* @param uuid The UUID of the drm scheme.
* @param playbackLooper The looper associated with the media playback thread. Should usually be
* obtained using {@link com.google.android.exoplayer.ExoPlayer#getPlaybackLooper()}.
* @param callback Performs key and provisioning requests.
* @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument
* to {@link MediaDrm#getKeyRequest(byte[], byte[], String, int, HashMap)}. May be null.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @throws UnsupportedSchemeException If the specified DRM scheme is not supported.
*/
public StreamingDrmSessionManager(UUID uuid, Looper playbackLooper, MediaDrmCallback callback,
Handler eventHandler, EventListener eventListener) throws UnsupportedSchemeException {
HashMap<String, String> optionalKeyRequestParameters, Handler eventHandler,
EventListener eventListener) throws UnsupportedSchemeException {
this.uuid = uuid;
this.callback = callback;
this.optionalKeyRequestParameters = optionalKeyRequestParameters;
this.eventHandler = eventHandler;
this.eventListener = eventListener;
mediaDrm = new MediaDrm(uuid);
@ -179,6 +195,7 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
return;
}
state = STATE_CLOSED;
provisioningInProgress = false;
mediaDrmHandler.removeCallbacksAndMessages(null);
postResponseHandler.removeCallbacksAndMessages(null);
postRequestHandler.removeCallbacksAndMessages(null);
@ -212,11 +229,16 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
}
// Posts a provisioning request to the request handler thread. Guarded so that only one
// provisioning request is in flight at a time; the flag is cleared when the response arrives.
private void postProvisionRequest() {
  if (provisioningInProgress) {
    // A request is already pending; avoid issuing a duplicate.
    return;
  }
  provisioningInProgress = true;
  ProvisionRequest request = mediaDrm.getProvisionRequest();
  postRequestHandler.obtainMessage(MSG_PROVISION, request).sendToTarget();
}
private void onProvisionResponse(Object response) {
provisioningInProgress = false;
if (state != STATE_OPENING && state != STATE_OPENED && state != STATE_OPENED_WITH_KEYS) {
// This event is stale.
return;
@ -243,7 +265,7 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
KeyRequest keyRequest;
try {
keyRequest = mediaDrm.getKeyRequest(sessionId, schemePsshData, mimeType,
MediaDrm.KEY_TYPE_STREAMING, null);
MediaDrm.KEY_TYPE_STREAMING, optionalKeyRequestParameters);
postRequestHandler.obtainMessage(MSG_KEYS, keyRequest).sendToTarget();
} catch (NotProvisionedException e) {
onKeysError(e);
@ -277,13 +299,13 @@ public class StreamingDrmSessionManager implements DrmSessionManager {
}
}
private void onError(Exception e) {
private void onError(final Exception e) {
lastException = e;
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
public void run() {
eventListener.onDrmSessionManagerError(lastException);
eventListener.onDrmSessionManagerError(e);
}
});
}

View File

@ -24,6 +24,10 @@ import java.util.ArrayList;
public static final int TYPE_esds = 0x65736473;
public static final int TYPE_mdat = 0x6D646174;
public static final int TYPE_mp4a = 0x6D703461;
public static final int TYPE_ac_3 = 0x61632D33; // ac-3
public static final int TYPE_dac3 = 0x64616333;
public static final int TYPE_ec_3 = 0x65632D33; // ec-3
public static final int TYPE_dec3 = 0x64656333;
public static final int TYPE_tfdt = 0x74666474;
public static final int TYPE_tfhd = 0x74666864;
public static final int TYPE_trex = 0x74726578;
@ -53,6 +57,7 @@ import java.util.ArrayList;
public static final int TYPE_saiz = 0x7361697A;
public static final int TYPE_uuid = 0x75756964;
public static final int TYPE_senc = 0x73656E63;
public static final int TYPE_pasp = 0x70617370;
public final int type;

View File

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer.parser.mp4;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
@ -26,6 +27,7 @@ import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
@ -65,6 +67,11 @@ public final class FragmentedMp4Extractor implements Extractor {
private static final byte[] NAL_START_CODE = new byte[] {0, 0, 0, 1};
private static final byte[] PIFF_SAMPLE_ENCRYPTION_BOX_EXTENDED_TYPE =
new byte[] {-94, 57, 79, 82, 90, -101, 79, 20, -94, 68, 108, 66, 124, 100, -115, -12};
/** Channel counts for AC-3 audio, indexed by acmod. (See ETSI TS 102 366.) */
private static final int[] AC3_CHANNEL_COUNTS = new int[] {2, 1, 2, 3, 3, 4, 4, 5};
/** Nominal bit-rates for AC-3 audio in kbps, indexed by bit_rate_code. (See ETSI TS 102 366.) */
private static final int[] AC3_BIT_RATES = new int[] {32, 40, 48, 56, 64, 80, 96, 112, 128, 160,
192, 224, 256, 320, 384, 448, 512, 576, 640};
// Parser states
private static final int STATE_READING_ATOM_HEADER = 0;
@ -106,6 +113,7 @@ public final class FragmentedMp4Extractor implements Extractor {
parsedAtoms.add(Atom.TYPE_saiz);
parsedAtoms.add(Atom.TYPE_uuid);
parsedAtoms.add(Atom.TYPE_senc);
parsedAtoms.add(Atom.TYPE_pasp);
PARSED_ATOMS = Collections.unmodifiableSet(parsedAtoms);
}
@ -181,20 +189,6 @@ public final class FragmentedMp4Extractor implements Extractor {
this.track = track;
}
/**
 * Sideloads pssh information into the extractor, so that it can be read through
 * {@link #getPsshInfo()}.
 *
 * <p>Sideloading data for a scheme UUID that was already sideloaded (or parsed) replaces the
 * previously held data for that scheme.
 *
 * @param uuid The UUID of the scheme for which information is being sideloaded.
 * @param data The corresponding data.
 */
public void putPsshInfo(UUID uuid, byte[] data) {
// TODO: This is for SmoothStreaming. Consider using something other than
// FragmentedMp4Extractor.getPsshInfo to obtain the pssh data for that use case, so that we can
// remove this method.
psshData.put(uuid, data);
}
@Override
public Map<UUID, byte[]> getPsshInfo() {
return psshData.isEmpty() ? null : psshData;
@ -245,7 +239,7 @@ public final class FragmentedMp4Extractor implements Extractor {
@Override
public boolean seekTo(long seekTimeUs, boolean allowNoop) {
pendingSeekTimeMs = (int) (seekTimeUs / 1000);
if (allowNoop && fragmentRun != null
if (allowNoop && fragmentRun != null && fragmentRun.length > 0
&& pendingSeekTimeMs >= fragmentRun.getSamplePresentationTime(0)
&& pendingSeekTimeMs <= fragmentRun.getSamplePresentationTime(fragmentRun.length - 1)) {
int sampleIndexFound = 0;
@ -454,7 +448,8 @@ public final class FragmentedMp4Extractor implements Extractor {
/**
* Parses a tkhd atom (defined in 14496-12).
*
* @return A {@link Pair} consisting of the track id and duration.
* @return A {@link Pair} consisting of the track id and duration (in the timescale indicated in
* the movie header box). The duration is set to -1 if the duration is unspecified.
*/
private static Pair<Integer, Long> parseTkhd(ParsableByteArray tkhd) {
tkhd.setPosition(ATOM_HEADER_SIZE);
@ -465,7 +460,23 @@ public final class FragmentedMp4Extractor implements Extractor {
int trackId = tkhd.readInt();
tkhd.skip(4);
long duration = version == 0 ? tkhd.readUnsignedInt() : tkhd.readUnsignedLongToLong();
boolean durationUnknown = true;
int durationPosition = tkhd.getPosition();
int durationByteCount = version == 0 ? 4 : 8;
for (int i = 0; i < durationByteCount; i++) {
if (tkhd.data[durationPosition + i] != -1) {
durationUnknown = false;
break;
}
}
long duration;
if (durationUnknown) {
tkhd.skip(durationByteCount);
duration = -1;
} else {
duration = version == 0 ? tkhd.readUnsignedInt() : tkhd.readUnsignedLongToLong();
}
return Pair.create(trackId, duration);
}
@ -511,11 +522,12 @@ public final class FragmentedMp4Extractor implements Extractor {
parseAvcFromParent(stsd, childStartPosition, childAtomSize);
mediaFormat = avc.first;
trackEncryptionBoxes[i] = avc.second;
} else if (childAtomType == Atom.TYPE_mp4a || childAtomType == Atom.TYPE_enca) {
Pair<MediaFormat, TrackEncryptionBox> mp4a =
parseMp4aFromParent(stsd, childStartPosition, childAtomSize);
mediaFormat = mp4a.first;
trackEncryptionBoxes[i] = mp4a.second;
} else if (childAtomType == Atom.TYPE_mp4a || childAtomType == Atom.TYPE_enca
|| childAtomType == Atom.TYPE_ac_3) {
Pair<MediaFormat, TrackEncryptionBox> audioSampleEntry =
parseAudioSampleEntry(stsd, childAtomType, childStartPosition, childAtomSize);
mediaFormat = audioSampleEntry.first;
trackEncryptionBoxes[i] = audioSampleEntry.second;
}
stsd.setPosition(childStartPosition + childAtomSize);
}
@ -529,6 +541,7 @@ public final class FragmentedMp4Extractor implements Extractor {
parent.skip(24);
int width = parent.readUnsignedShort();
int height = parent.readUnsignedShort();
float pixelWidthHeightRatio = 1;
parent.skip(50);
List<byte[]> initializationData = null;
@ -543,24 +556,26 @@ public final class FragmentedMp4Extractor implements Extractor {
initializationData = parseAvcCFromParent(parent, childStartPosition);
} else if (childAtomType == Atom.TYPE_sinf) {
trackEncryptionBox = parseSinfFromParent(parent, childStartPosition, childAtomSize);
} else if (childAtomType == Atom.TYPE_pasp) {
pixelWidthHeightRatio = parsePaspFromParent(parent, childStartPosition);
}
childPosition += childAtomSize;
}
MediaFormat format = MediaFormat.createVideoFormat(MimeTypes.VIDEO_H264, MediaFormat.NO_VALUE,
width, height, initializationData);
width, height, pixelWidthHeightRatio, initializationData);
return Pair.create(format, trackEncryptionBox);
}
private static Pair<MediaFormat, TrackEncryptionBox> parseMp4aFromParent(ParsableByteArray parent,
int position, int size) {
private static Pair<MediaFormat, TrackEncryptionBox> parseAudioSampleEntry(
ParsableByteArray parent, int atomType, int position, int size) {
parent.setPosition(position + ATOM_HEADER_SIZE);
// Start of the mp4a atom (defined in 14496-14)
parent.skip(16);
int channelCount = parent.readUnsignedShort();
int sampleSize = parent.readUnsignedShort();
parent.skip(4);
int sampleRate = parent.readUnsignedFixedPoint1616();
int bitrate = MediaFormat.NO_VALUE;
byte[] initializationData = null;
TrackEncryptionBox trackEncryptionBox = null;
@ -570,25 +585,97 @@ public final class FragmentedMp4Extractor implements Extractor {
int childStartPosition = parent.getPosition();
int childAtomSize = parent.readInt();
int childAtomType = parent.readInt();
if (childAtomType == Atom.TYPE_esds) {
initializationData = parseEsdsFromParent(parent, childStartPosition);
// TODO: Do we really need to do this? See [redacted]
// Update sampleRate and channelCount from the AudioSpecificConfig initialization data.
Pair<Integer, Integer> audioSpecificConfig =
CodecSpecificDataUtil.parseAudioSpecificConfig(initializationData);
sampleRate = audioSpecificConfig.first;
channelCount = audioSpecificConfig.second;
} else if (childAtomType == Atom.TYPE_sinf) {
trackEncryptionBox = parseSinfFromParent(parent, childStartPosition, childAtomSize);
if (atomType == Atom.TYPE_mp4a || atomType == Atom.TYPE_enca) {
if (childAtomType == Atom.TYPE_esds) {
initializationData = parseEsdsFromParent(parent, childStartPosition);
// TODO: Do we really need to do this? See [Internal: b/10903778]
// Update sampleRate and channelCount from the AudioSpecificConfig initialization data.
Pair<Integer, Integer> audioSpecificConfig =
CodecSpecificDataUtil.parseAudioSpecificConfig(initializationData);
sampleRate = audioSpecificConfig.first;
channelCount = audioSpecificConfig.second;
} else if (childAtomType == Atom.TYPE_sinf) {
trackEncryptionBox = parseSinfFromParent(parent, childStartPosition, childAtomSize);
}
} else if (atomType == Atom.TYPE_ac_3 && childAtomType == Atom.TYPE_dac3) {
// TODO: Choose the right AC-3 track based on the contents of dac3/dec3.
Ac3Format ac3Format =
parseAc3SpecificBoxFromParent(parent, childStartPosition);
if (ac3Format != null) {
sampleRate = ac3Format.sampleRate;
channelCount = ac3Format.channelCount;
bitrate = ac3Format.bitrate;
}
// TODO: Add support for encrypted AC-3.
trackEncryptionBox = null;
} else if (atomType == Atom.TYPE_ec_3 && childAtomType == Atom.TYPE_dec3) {
sampleRate = parseEc3SpecificBoxFromParent(parent, childStartPosition);
trackEncryptionBox = null;
}
childPosition += childAtomSize;
}
MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", sampleSize, channelCount,
sampleRate, Collections.singletonList(initializationData));
String mimeType;
if (atomType == Atom.TYPE_ac_3) {
mimeType = MimeTypes.AUDIO_AC3;
} else if (atomType == Atom.TYPE_ec_3) {
mimeType = MimeTypes.AUDIO_EC3;
} else {
mimeType = MimeTypes.AUDIO_AAC;
}
MediaFormat format = MediaFormat.createAudioFormat(
mimeType, sampleSize, channelCount, sampleRate, bitrate,
initializationData == null ? null : Collections.singletonList(initializationData));
return Pair.create(format, trackEncryptionBox);
}
/**
 * Parses an AC3SpecificBox (dac3 atom, defined in ETSI TS 102 366) to derive the format of an
 * AC-3 audio track.
 *
 * @param parent Buffer whose data contains the atom.
 * @param position Offset of the dac3 atom within {@code parent}.
 * @return The parsed format, or null if the sample rate code is reserved.
 */
private static Ac3Format parseAc3SpecificBoxFromParent(ParsableByteArray parent, int position) {
  // Position at the start of the dac3 payload, past the atom header.
  parent.setPosition(position + ATOM_HEADER_SIZE);
  // fscod (sample rate code) occupies the top two bits of the first byte.
  int sampleRateCode = (parent.readUnsignedByte() & 0xC0) >> 6;
  if (sampleRateCode > 2) {
    // Reserved fscod value. TODO: The decoder should not use this stream.
    return null;
  }
  int[] sampleRatesByCode = {48000, 44100, 32000};
  int sampleRate = sampleRatesByCode[sampleRateCode];
  int secondByte = parent.readUnsignedByte();
  // acmod (audio coding mode, bits 5-3) maps onto a base channel count.
  int channelCount = AC3_CHANNEL_COUNTS[(secondByte & 0x38) >> 3];
  if ((secondByte & 0x04) != 0) {
    // lfeon (low frequency effects on) adds one channel.
    channelCount++;
  }
  // bit_rate_code spans the low two bits of this byte and the top three bits of the next,
  // and maps onto a nominal bit-rate in kbit/s.
  int bitrateCode = ((secondByte & 0x03) << 3) + (parent.readUnsignedByte() >> 5);
  return new Ac3Format(channelCount, sampleRate, AC3_BIT_RATES[bitrateCode]);
}
/**
 * Parses an EC3SpecificBox (dec3 atom, defined in ETSI TS 102 366) for an enhanced AC-3 track.
 *
 * <p>Currently a stub: the box contents are not yet parsed, so no sample rate can be derived
 * and 0 is always returned.
 *
 * @param parent Buffer whose data contains the atom.
 * @param position Offset of the dec3 atom within {@code parent}.
 * @return Always 0 until sub-stream parsing is implemented.
 */
private static int parseEc3SpecificBoxFromParent(ParsableByteArray parent, int position) {
// Start of the dec3 atom (defined in ETSI TS 102 366)
parent.setPosition(position + ATOM_HEADER_SIZE);
// TODO: Implement parsing for enhanced AC-3 with multiple sub-streams.
return 0;
}
private static List<byte[]> parseAvcCFromParent(ParsableByteArray parent, int position) {
parent.setPosition(position + ATOM_HEADER_SIZE + 4);
// Start of the AVCDecoderConfigurationRecord (defined in 14496-15)
@ -643,6 +730,13 @@ public final class FragmentedMp4Extractor implements Extractor {
return trackEncryptionBox;
}
/**
 * Parses a pasp atom (pixel aspect ratio box, defined in 14496-12).
 *
 * @param parent Buffer whose data contains the atom.
 * @param position Offset of the pasp atom within {@code parent}.
 * @return The ratio of horizontal to vertical pixel spacing (pixel width/height ratio).
 */
private static float parsePaspFromParent(ParsableByteArray parent, int position) {
  // Skip the atom header to reach the hSpacing/vSpacing fields.
  parent.setPosition(position + ATOM_HEADER_SIZE);
  int horizontalSpacing = parent.readUnsignedIntToInt();
  int verticalSpacing = parent.readUnsignedIntToInt();
  return horizontalSpacing / (float) verticalSpacing;
}
private static TrackEncryptionBox parseSchiFromParent(ParsableByteArray parent, int position,
int size) {
int childPosition = position + ATOM_HEADER_SIZE;
@ -740,9 +834,12 @@ public final class FragmentedMp4Extractor implements Extractor {
parseSenc(senc.data, out);
}
LeafAtom uuid = traf.getLeafAtomOfType(Atom.TYPE_uuid);
if (uuid != null) {
parseUuid(uuid.data, out, extendedTypeScratch);
int childrenSize = traf.children.size();
for (int i = 0; i < childrenSize; i++) {
Atom atom = traf.children.get(i);
if (atom.type == Atom.TYPE_uuid) {
parseUuid(((LeafAtom) atom).data, out, extendedTypeScratch);
}
}
}
@ -855,8 +952,8 @@ public final class FragmentedMp4Extractor implements Extractor {
out.initTables(sampleCount);
int[] sampleSizeTable = out.sampleSizeTable;
int[] sampleDecodingTimeTable = out.sampleDecodingTimeTable;
int[] sampleCompositionTimeOffsetTable = out.sampleCompositionTimeOffsetTable;
long[] sampleDecodingTimeTable = out.sampleDecodingTimeTable;
boolean[] sampleIsSyncFrameTable = out.sampleIsSyncFrameTable;
long timescale = track.timescale;
@ -882,7 +979,7 @@ public final class FragmentedMp4Extractor implements Extractor {
} else {
sampleCompositionTimeOffsetTable[i] = 0;
}
sampleDecodingTimeTable[i] = (int) ((cumulativeTime * 1000) / timescale);
sampleDecodingTimeTable[i] = (cumulativeTime * 1000) / timescale;
sampleSizeTable[i] = sampleSize;
sampleIsSyncFrameTable[i] = ((sampleFlags >> 16) & 0x1) == 0
&& (!workaroundEveryVideoFrameIsSyncFrame || i == 0);
@ -961,6 +1058,7 @@ public final class FragmentedMp4Extractor implements Extractor {
long offset = firstOffset;
long time = earliestPresentationTime;
long timeUs = Util.scaleLargeTimestamp(time, C.MICROS_PER_SECOND, timescale);
for (int i = 0; i < referenceCount; i++) {
int firstInt = atom.readInt();
@ -975,10 +1073,10 @@ public final class FragmentedMp4Extractor implements Extractor {
// Calculate time and duration values such that any rounding errors are consistent. i.e. That
// timesUs[i] + durationsUs[i] == timesUs[i + 1].
timesUs[i] = (time * 1000000L) / timescale;
long nextTimeUs = ((time + referenceDuration) * 1000000L) / timescale;
durationsUs[i] = nextTimeUs - timesUs[i];
timesUs[i] = timeUs;
time += referenceDuration;
timeUs = Util.scaleLargeTimestamp(time, C.MICROS_PER_SECOND, timescale);
durationsUs[i] = timeUs - timesUs[i];
atom.skip(4);
offset += sizes[i];
@ -1055,21 +1153,20 @@ public final class FragmentedMp4Extractor implements Extractor {
if (out == null) {
return RESULT_NEED_SAMPLE_HOLDER;
}
ByteBuffer outputData = out.data;
out.timeUs = fragmentRun.getSamplePresentationTime(sampleIndex) * 1000L;
out.flags = 0;
if (fragmentRun.sampleIsSyncFrameTable[sampleIndex]) {
out.flags |= MediaExtractor.SAMPLE_FLAG_SYNC;
lastSyncSampleIndex = sampleIndex;
}
if (out.allowDataBufferReplacement && (out.data == null || out.data.capacity() < sampleSize)) {
outputData = ByteBuffer.allocate(sampleSize);
out.data = outputData;
if (out.data == null || out.data.capacity() < sampleSize) {
out.replaceBuffer(sampleSize);
}
if (fragmentRun.definesEncryptionData) {
readSampleEncryptionData(fragmentRun.sampleEncryptionData, out);
}
ByteBuffer outputData = out.data;
if (outputData == null) {
inputStream.skip(sampleSize);
out.size = 0;
@ -1169,4 +1266,19 @@ public final class FragmentedMp4Extractor implements Extractor {
return result;
}
/** Represents the format for AC-3 audio. */
private static final class Ac3Format {
// Number of audio channels, including the LFE channel when lfeon is set.
public final int channelCount;
// Sample rate in Hz (one of 48000, 44100 or 32000, per fscod).
public final int sampleRate;
// Nominal bit-rate in kbit/s, as signaled by bit_rate_code.
public final int bitrate;
public Ac3Format(int channelCount, int sampleRate, int bitrate) {
this.channelCount = channelCount;
this.sampleRate = sampleRate;
this.bitrate = bitrate;
}
}
}

View File

@ -32,14 +32,14 @@ import com.google.android.exoplayer.upstream.NonBlockingInputStream;
* The size of each sample in the run.
*/
public int[] sampleSizeTable;
/**
* The decoding time of each sample in the run.
*/
public int[] sampleDecodingTimeTable;
/**
* The composition time offset of each sample in the run.
*/
public int[] sampleCompositionTimeOffsetTable;
/**
* The decoding time of each sample in the run.
*/
public long[] sampleDecodingTimeTable;
/**
* Indicates which samples are sync frames.
*/
@ -95,8 +95,8 @@ import com.google.android.exoplayer.upstream.NonBlockingInputStream;
// likely. The choice of 25% is relatively arbitrary.
int tableSize = (sampleCount * 125) / 100;
sampleSizeTable = new int[tableSize];
sampleDecodingTimeTable = new int[tableSize];
sampleCompositionTimeOffsetTable = new int[tableSize];
sampleDecodingTimeTable = new long[tableSize];
sampleIsSyncFrameTable = new boolean[tableSize];
sampleHasSubsampleEncryptionTable = new boolean[tableSize];
}
@ -147,7 +147,7 @@ import com.google.android.exoplayer.upstream.NonBlockingInputStream;
return true;
}
public int getSamplePresentationTime(int index) {
public long getSamplePresentationTime(int index) {
return sampleDecodingTimeTable[index] + sampleCompositionTimeOffsetTable[index];
}

View File

@ -15,6 +15,8 @@
*/
package com.google.android.exoplayer.parser.webm;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import com.google.android.exoplayer.util.Assertions;
@ -133,7 +135,7 @@ import java.util.Stack;
}
@Override
public int read(NonBlockingInputStream inputStream) {
public int read(NonBlockingInputStream inputStream) throws ParserException {
Assertions.checkState(eventHandler != null);
while (true) {
while (!masterElementsStack.isEmpty()
@ -210,7 +212,7 @@ import java.util.Stack;
if (stringResult != READ_RESULT_CONTINUE) {
return stringResult;
}
String stringValue = new String(stringBytes, Charset.forName("UTF-8"));
String stringValue = new String(stringBytes, Charset.forName(C.UTF8_NAME));
stringBytes = null;
eventHandler.onStringElement(elementId, stringValue);
prepareForNextElement();

View File

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer.parser.webm;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import java.nio.ByteBuffer;
@ -46,41 +47,47 @@ import java.nio.ByteBuffer;
* @param elementOffsetBytes The byte offset where this element starts
* @param headerSizeBytes The byte length of this element's ID and size header
* @param contentsSizeBytes The byte length of this element's children
* @throws ParserException If a parsing error occurs.
*/
public void onMasterElementStart(
int id, long elementOffsetBytes, int headerSizeBytes, long contentsSizeBytes);
int id, long elementOffsetBytes, int headerSizeBytes,
long contentsSizeBytes) throws ParserException;
/**
* Called when a master element has finished reading in all of its children from the
* {@link NonBlockingInputStream}.
*
* @param id The integer ID of this element
* @throws ParserException If a parsing error occurs.
*/
public void onMasterElementEnd(int id);
public void onMasterElementEnd(int id) throws ParserException;
/**
* Called when an integer element is encountered in the {@link NonBlockingInputStream}.
*
* @param id The integer ID of this element
* @param value The integer value this element contains
* @throws ParserException If a parsing error occurs.
*/
public void onIntegerElement(int id, long value);
public void onIntegerElement(int id, long value) throws ParserException;
/**
* Called when a float element is encountered in the {@link NonBlockingInputStream}.
*
* @param id The integer ID of this element
* @param value The float value this element contains
* @throws ParserException If a parsing error occurs.
*/
public void onFloatElement(int id, double value);
public void onFloatElement(int id, double value) throws ParserException;
/**
* Called when a string element is encountered in the {@link NonBlockingInputStream}.
*
* @param id The integer ID of this element
* @param value The string value this element contains
* @throws ParserException If a parsing error occurs.
*/
public void onStringElement(int id, String value);
public void onStringElement(int id, String value) throws ParserException;
/**
* Called when a binary element is encountered in the {@link NonBlockingInputStream}.
@ -109,9 +116,10 @@ import java.nio.ByteBuffer;
* @param inputStream The {@link NonBlockingInputStream} from which this
* element's contents should be read
* @return True if the element was read. False otherwise.
* @throws ParserException If a parsing error occurs.
*/
public boolean onBinaryElement(
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
NonBlockingInputStream inputStream);
NonBlockingInputStream inputStream) throws ParserException;
}

View File

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer.parser.webm;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import java.nio.ByteBuffer;
@ -53,8 +54,9 @@ import java.nio.ByteBuffer;
*
* @param inputStream The input stream from which data should be read
* @return One of the {@code RESULT_*} flags defined in this interface
* @throws ParserException If parsing fails.
*/
public int read(NonBlockingInputStream inputStream);
public int read(NonBlockingInputStream inputStream) throws ParserException;
/**
* The total number of bytes consumed by the reader since first created or last {@link #reset()}.

View File

@ -16,6 +16,7 @@
package com.google.android.exoplayer.parser.webm;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.parser.Extractor;
import com.google.android.exoplayer.parser.SegmentIndex;
@ -27,6 +28,7 @@ import android.annotation.TargetApi;
import android.media.MediaExtractor;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map;
import java.util.UUID;
@ -44,6 +46,8 @@ public final class WebmExtractor implements Extractor {
private static final String DOC_TYPE_WEBM = "webm";
private static final String CODEC_ID_VP9 = "V_VP9";
private static final String CODEC_ID_VORBIS = "A_VORBIS";
private static final int VORBIS_MAX_INPUT_SIZE = 8192;
private static final int UNKNOWN = -1;
// Element IDs
@ -65,9 +69,13 @@ public final class WebmExtractor implements Extractor {
private static final int ID_TRACKS = 0x1654AE6B;
private static final int ID_TRACK_ENTRY = 0xAE;
private static final int ID_CODEC_ID = 0x86;
private static final int ID_CODEC_PRIVATE = 0x63A2;
private static final int ID_VIDEO = 0xE0;
private static final int ID_PIXEL_WIDTH = 0xB0;
private static final int ID_PIXEL_HEIGHT = 0xBA;
private static final int ID_AUDIO = 0xE1;
private static final int ID_CHANNELS = 0x9F;
private static final int ID_SAMPLING_FREQUENCY = 0xB5;
private static final int ID_CUES = 0x1C53BB6B;
private static final int ID_CUE_POINT = 0xBB;
@ -96,6 +104,10 @@ public final class WebmExtractor implements Extractor {
private long durationUs = UNKNOWN;
private int pixelWidth = UNKNOWN;
private int pixelHeight = UNKNOWN;
private int channelCount = UNKNOWN;
private int sampleRate = UNKNOWN;
private byte[] codecPrivate;
private boolean seenAudioTrack;
private long cuesSizeBytes = UNKNOWN;
private long clusterTimecodeUs = UNKNOWN;
private long simpleBlockTimecodeUs = UNKNOWN;
@ -114,7 +126,8 @@ public final class WebmExtractor implements Extractor {
}
@Override
public int read(NonBlockingInputStream inputStream, SampleHolder sampleHolder) {
public int read(
NonBlockingInputStream inputStream, SampleHolder sampleHolder) throws ParserException {
this.sampleHolder = sampleHolder;
this.readResults = 0;
while ((readResults & READ_TERMINATING_RESULTS) == 0) {
@ -176,6 +189,7 @@ public final class WebmExtractor implements Extractor {
case ID_CLUSTER:
case ID_TRACKS:
case ID_TRACK_ENTRY:
case ID_AUDIO:
case ID_VIDEO:
case ID_CUES:
case ID_CUE_POINT:
@ -187,6 +201,7 @@ public final class WebmExtractor implements Extractor {
case ID_TIME_CODE:
case ID_PIXEL_WIDTH:
case ID_PIXEL_HEIGHT:
case ID_CHANNELS:
case ID_CUE_TIME:
case ID_CUE_CLUSTER_POSITION:
return EbmlReader.TYPE_UNSIGNED_INT;
@ -194,8 +209,10 @@ public final class WebmExtractor implements Extractor {
case ID_CODEC_ID:
return EbmlReader.TYPE_STRING;
case ID_SIMPLE_BLOCK:
case ID_CODEC_PRIVATE:
return EbmlReader.TYPE_BINARY;
case ID_DURATION:
case ID_SAMPLING_FREQUENCY:
return EbmlReader.TYPE_FLOAT;
default:
return EbmlReader.TYPE_UNKNOWN;
@ -203,11 +220,12 @@ public final class WebmExtractor implements Extractor {
}
/* package */ boolean onMasterElementStart(
int id, long elementOffsetBytes, int headerSizeBytes, long contentsSizeBytes) {
int id, long elementOffsetBytes, int headerSizeBytes,
long contentsSizeBytes) throws ParserException {
switch (id) {
case ID_SEGMENT:
if (segmentStartOffsetBytes != UNKNOWN || segmentEndOffsetBytes != UNKNOWN) {
throw new IllegalStateException("Multiple Segment elements not supported");
throw new ParserException("Multiple Segment elements not supported");
}
segmentStartOffsetBytes = elementOffsetBytes + headerSizeBytes;
segmentEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
@ -223,31 +241,41 @@ public final class WebmExtractor implements Extractor {
return true;
}
/* package */ boolean onMasterElementEnd(int id) {
/* package */ boolean onMasterElementEnd(int id) throws ParserException {
switch (id) {
case ID_CUES:
buildCues();
return false;
case ID_VIDEO:
buildFormat();
buildVideoFormat();
return true;
case ID_AUDIO:
seenAudioTrack = true;
return true;
case ID_TRACK_ENTRY:
if (seenAudioTrack) {
// Audio format has to be built here since codec private may not be available at the end
// of ID_AUDIO.
buildAudioFormat();
}
return true;
default:
return true;
}
}
/* package */ boolean onIntegerElement(int id, long value) {
/* package */ boolean onIntegerElement(int id, long value) throws ParserException {
switch (id) {
case ID_EBML_READ_VERSION:
// Validate that EBMLReadVersion is supported. This extractor only supports v1.
if (value != 1) {
throw new IllegalArgumentException("EBMLReadVersion " + value + " not supported");
throw new ParserException("EBMLReadVersion " + value + " not supported");
}
break;
case ID_DOC_TYPE_READ_VERSION:
// Validate that DocTypeReadVersion is supported. This extractor only supports up to v2.
if (value < 1 || value > 2) {
throw new IllegalArgumentException("DocTypeReadVersion " + value + " not supported");
throw new ParserException("DocTypeReadVersion " + value + " not supported");
}
break;
case ID_TIMECODE_SCALE:
@ -259,6 +287,9 @@ public final class WebmExtractor implements Extractor {
case ID_PIXEL_HEIGHT:
pixelHeight = (int) value;
break;
case ID_CHANNELS:
channelCount = (int) value;
break;
case ID_CUE_TIME:
cueTimesUs.add(scaleTimecodeToUs(value));
break;
@ -275,24 +306,31 @@ public final class WebmExtractor implements Extractor {
}
/* package */ boolean onFloatElement(int id, double value) {
if (id == ID_DURATION) {
durationUs = scaleTimecodeToUs((long) value);
switch (id) {
case ID_DURATION:
durationUs = scaleTimecodeToUs((long) value);
break;
case ID_SAMPLING_FREQUENCY:
sampleRate = (int) value;
break;
default:
// pass
}
return true;
}
/* package */ boolean onStringElement(int id, String value) {
/* package */ boolean onStringElement(int id, String value) throws ParserException {
switch (id) {
case ID_DOC_TYPE:
// Validate that DocType is supported. This extractor only supports "webm".
if (!DOC_TYPE_WEBM.equals(value)) {
throw new IllegalArgumentException("DocType " + value + " not supported");
throw new ParserException("DocType " + value + " not supported");
}
break;
case ID_CODEC_ID:
// Validate that CodecID is supported. This extractor only supports "V_VP9".
if (!CODEC_ID_VP9.equals(value)) {
throw new IllegalArgumentException("CodecID " + value + " not supported");
// Validate that CodecID is supported. This extractor only supports "V_VP9" and "A_VORBIS".
if (!CODEC_ID_VP9.equals(value) && !CODEC_ID_VORBIS.equals(value)) {
throw new ParserException("CodecID " + value + " not supported");
}
break;
default:
@ -303,64 +341,70 @@ public final class WebmExtractor implements Extractor {
/* package */ boolean onBinaryElement(
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
NonBlockingInputStream inputStream) {
if (id == ID_SIMPLE_BLOCK) {
// Please refer to http://www.matroska.org/technical/specs/index.html#simpleblock_structure
// for info about how data is organized in a SimpleBlock element.
NonBlockingInputStream inputStream) throws ParserException {
switch (id) {
case ID_SIMPLE_BLOCK:
// Please refer to http://www.matroska.org/technical/specs/index.html#simpleblock_structure
// for info about how data is organized in a SimpleBlock element.
// If we don't have a sample holder then don't consume the data.
if (sampleHolder == null) {
readResults |= RESULT_NEED_SAMPLE_HOLDER;
return false;
}
// If we don't have a sample holder then don't consume the data.
if (sampleHolder == null) {
readResults |= RESULT_NEED_SAMPLE_HOLDER;
return false;
}
// Value of trackNumber is not used but needs to be read.
reader.readVarint(inputStream);
// Value of trackNumber is not used but needs to be read.
reader.readVarint(inputStream);
// Next three bytes have timecode and flags.
reader.readBytes(inputStream, simpleBlockTimecodeAndFlags, 3);
// Next three bytes have timecode and flags.
reader.readBytes(inputStream, simpleBlockTimecodeAndFlags, 3);
// First two bytes of the three are the relative timecode.
int timecode =
(simpleBlockTimecodeAndFlags[0] << 8) | (simpleBlockTimecodeAndFlags[1] & 0xff);
long timecodeUs = scaleTimecodeToUs(timecode);
// First two bytes of the three are the relative timecode.
int timecode =
(simpleBlockTimecodeAndFlags[0] << 8) | (simpleBlockTimecodeAndFlags[1] & 0xff);
long timecodeUs = scaleTimecodeToUs(timecode);
// Last byte of the three has some flags and the lacing value.
boolean keyframe = (simpleBlockTimecodeAndFlags[2] & 0x80) == 0x80;
boolean invisible = (simpleBlockTimecodeAndFlags[2] & 0x08) == 0x08;
int lacing = (simpleBlockTimecodeAndFlags[2] & 0x06) >> 1;
// Last byte of the three has some flags and the lacing value.
boolean keyframe = (simpleBlockTimecodeAndFlags[2] & 0x80) == 0x80;
boolean invisible = (simpleBlockTimecodeAndFlags[2] & 0x08) == 0x08;
int lacing = (simpleBlockTimecodeAndFlags[2] & 0x06) >> 1;
// Validate lacing and set info into sample holder.
switch (lacing) {
case LACING_NONE:
long elementEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
simpleBlockTimecodeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.flags = keyframe ? MediaExtractor.SAMPLE_FLAG_SYNC : 0;
sampleHolder.decodeOnly = invisible;
sampleHolder.timeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.size = (int) (elementEndOffsetBytes - reader.getBytesRead());
break;
case LACING_EBML:
case LACING_FIXED:
case LACING_XIPH:
default:
throw new IllegalStateException("Lacing mode " + lacing + " not supported");
}
// Validate lacing and set info into sample holder.
switch (lacing) {
case LACING_NONE:
long elementEndOffsetBytes = elementOffsetBytes + headerSizeBytes + contentsSizeBytes;
simpleBlockTimecodeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.flags = keyframe ? MediaExtractor.SAMPLE_FLAG_SYNC : 0;
sampleHolder.decodeOnly = invisible;
sampleHolder.timeUs = clusterTimecodeUs + timecodeUs;
sampleHolder.size = (int) (elementEndOffsetBytes - reader.getBytesRead());
break;
case LACING_EBML:
case LACING_FIXED:
case LACING_XIPH:
default:
throw new ParserException("Lacing mode " + lacing + " not supported");
}
ByteBuffer outputData = sampleHolder.data;
if (sampleHolder.allowDataBufferReplacement
&& (sampleHolder.data == null || sampleHolder.data.capacity() < sampleHolder.size)) {
outputData = ByteBuffer.allocate(sampleHolder.size);
sampleHolder.data = outputData;
}
if (sampleHolder.data == null || sampleHolder.data.capacity() < sampleHolder.size) {
sampleHolder.replaceBuffer(sampleHolder.size);
}
if (outputData == null) {
reader.skipBytes(inputStream, sampleHolder.size);
sampleHolder.size = 0;
} else {
reader.readBytes(inputStream, outputData, sampleHolder.size);
}
readResults |= RESULT_READ_SAMPLE;
ByteBuffer outputData = sampleHolder.data;
if (outputData == null) {
reader.skipBytes(inputStream, sampleHolder.size);
sampleHolder.size = 0;
} else {
reader.readBytes(inputStream, outputData, sampleHolder.size);
}
readResults |= RESULT_READ_SAMPLE;
break;
case ID_CODEC_PRIVATE:
codecPrivate = new byte[contentsSizeBytes];
reader.readBytes(inputStream, codecPrivate, contentsSizeBytes);
break;
default:
// pass
}
return true;
}
@ -374,16 +418,38 @@ public final class WebmExtractor implements Extractor {
*
* <p>Replaces the previous {@link #format} only if video width/height have changed.
* {@link #format} is guaranteed to not be null after calling this method. In
* the event that it can't be built, an {@link IllegalStateException} will be thrown.
* the event that it can't be built, a {@link ParserException} will be thrown.
*/
private void buildFormat() {
private void buildVideoFormat() throws ParserException {
if (pixelWidth != UNKNOWN && pixelHeight != UNKNOWN
&& (format == null || format.width != pixelWidth || format.height != pixelHeight)) {
format = MediaFormat.createVideoFormat(
MimeTypes.VIDEO_VP9, MediaFormat.NO_VALUE, pixelWidth, pixelHeight, null);
readResults |= RESULT_READ_INIT;
} else if (format == null) {
throw new IllegalStateException("Unable to build format");
throw new ParserException("Unable to build format");
}
}
/**
* Build an audio {@link MediaFormat} containing recently gathered Audio information, if needed.
*
* <p>Replaces the previous {@link #format} only if audio channel count/sample rate have changed.
* {@link #format} is guaranteed to not be null after calling this method.
*
* @throws ParserException If an error occurs when parsing codec's private data or if the format
* can't be built.
*/
private void buildAudioFormat() throws ParserException {
  // Only (re)build once both channel count and sample rate have been gathered, and only when
  // they differ from the current format (or no format exists yet), so repeated elements with
  // unchanged parameters don't produce redundant formats.
  if (channelCount != UNKNOWN && sampleRate != UNKNOWN
      && (format == null || format.channelCount != channelCount
          || format.sampleRate != sampleRate)) {
    format = MediaFormat.createAudioFormat(
        MimeTypes.AUDIO_VORBIS, VORBIS_MAX_INPUT_SIZE,
        sampleRate, channelCount, parseVorbisCodecPrivate());
    // Signal to the caller that initialization data (a format) has been read.
    readResults |= RESULT_READ_INIT;
  } else if (format == null) {
    // Not enough information yet to build any format at all — treat as a parse failure.
    throw new ParserException("Unable to build format");
  }
}
@ -391,18 +457,18 @@ public final class WebmExtractor implements Extractor {
* Build a {@link SegmentIndex} containing recently gathered Cues information.
*
* <p>{@link #cues} is guaranteed to not be null after calling this method. In
* the event that it can't be built, an {@link IllegalStateException} will be thrown.
* the event that it can't be built, a {@link ParserException} will be thrown.
*/
private void buildCues() {
private void buildCues() throws ParserException {
if (segmentStartOffsetBytes == UNKNOWN) {
throw new IllegalStateException("Segment start/end offsets unknown");
throw new ParserException("Segment start/end offsets unknown");
} else if (durationUs == UNKNOWN) {
throw new IllegalStateException("Duration unknown");
throw new ParserException("Duration unknown");
} else if (cuesSizeBytes == UNKNOWN) {
throw new IllegalStateException("Cues size unknown");
throw new ParserException("Cues size unknown");
} else if (cueTimesUs == null || cueClusterPositions == null
|| cueTimesUs.size() == 0 || cueTimesUs.size() != cueClusterPositions.size()) {
throw new IllegalStateException("Invalid/missing cue points");
throw new ParserException("Invalid/missing cue points");
}
int cuePointsSize = cueTimesUs.size();
int[] sizes = new int[cuePointsSize];
@ -425,6 +491,58 @@ public final class WebmExtractor implements Extractor {
readResults |= RESULT_READ_INDEX;
}
/**
* Parses Vorbis Codec Private data and adds it as initialization data to the {@link #format}.
* WebM Vorbis Codec Private data specification can be found
* <a href="http://matroska.org/technical/specs/codecid/index.html">here</a>.
*
* @return ArrayList of byte arrays containing the initialization data on success.
* @throws ParserException If parsing codec private data fails.
*/
private ArrayList<byte[]> parseVorbisCodecPrivate() throws ParserException {
  try {
    // First byte is expected to be 0x02 — per the Matroska codec spec this is the number of
    // laced header packets preceding the setup header (identification + comment headers).
    if (codecPrivate[0] != 0x02) {
      throw new ParserException("Error parsing vorbis codec private");
    }
    int offset = 1;

    // Length of the identification header, Xiph-lace encoded: each 0xFF byte contributes 255,
    // terminated by a final byte holding the remainder.
    int vorbisInfoLength = 0;
    while (codecPrivate[offset] == (byte) 0xFF) {
      vorbisInfoLength += 0xFF;
      offset++;
    }
    vorbisInfoLength += codecPrivate[offset++];

    // Length of the comment header, encoded the same way.
    int vorbisSkipLength = 0;
    while (codecPrivate[offset] == (byte) 0xFF) {
      vorbisSkipLength += 0xFF;
      offset++;
    }
    vorbisSkipLength += codecPrivate[offset++];

    // The identification header must begin with Vorbis packet type 0x01; copy it out.
    if (codecPrivate[offset] != 0x01) {
      throw new ParserException("Error parsing vorbis codec private");
    }
    byte[] vorbisInfo = new byte[vorbisInfoLength];
    System.arraycopy(codecPrivate, offset, vorbisInfo, 0, vorbisInfoLength);
    offset += vorbisInfoLength;

    // The comment header must begin with packet type 0x03; it is skipped, not copied.
    if (codecPrivate[offset] != 0x03) {
      throw new ParserException("Error parsing vorbis codec private");
    }
    offset += vorbisSkipLength;

    // Everything from the setup header (packet type 0x05) to the end of the data is copied as
    // the codebooks ("books") buffer.
    if (codecPrivate[offset] != 0x05) {
      throw new ParserException("Error parsing vorbis codec private");
    }
    byte[] vorbisBooks = new byte[codecPrivate.length - offset];
    System.arraycopy(codecPrivate, offset, vorbisBooks, 0, codecPrivate.length - offset);

    // Two initialization buffers are returned: identification header, then setup header.
    ArrayList<byte[]> initializationData = new ArrayList<byte[]>(2);
    initializationData.add(vorbisInfo);
    initializationData.add(vorbisBooks);
    return initializationData;
  } catch (ArrayIndexOutOfBoundsException e) {
    // Truncated or malformed data: any out-of-range read above is reported as a parse failure.
    throw new ParserException("Error parsing vorbis codec private");
  }
}
/**
* Passes events through to {@link WebmExtractor} as
* callbacks from {@link EbmlReader} are received.
@ -438,18 +556,19 @@ public final class WebmExtractor implements Extractor {
@Override
public void onMasterElementStart(
int id, long elementOffsetBytes, int headerSizeBytes, long contentsSizeBytes) {
int id, long elementOffsetBytes, int headerSizeBytes,
long contentsSizeBytes) throws ParserException {
WebmExtractor.this.onMasterElementStart(
id, elementOffsetBytes, headerSizeBytes, contentsSizeBytes);
}
@Override
public void onMasterElementEnd(int id) {
public void onMasterElementEnd(int id) throws ParserException {
WebmExtractor.this.onMasterElementEnd(id);
}
@Override
public void onIntegerElement(int id, long value) {
public void onIntegerElement(int id, long value) throws ParserException {
WebmExtractor.this.onIntegerElement(id, value);
}
@ -459,14 +578,14 @@ public final class WebmExtractor implements Extractor {
}
@Override
public void onStringElement(int id, String value) {
public void onStringElement(int id, String value) throws ParserException {
WebmExtractor.this.onStringElement(id, value);
}
@Override
public boolean onBinaryElement(
int id, long elementOffsetBytes, int headerSizeBytes, int contentsSizeBytes,
NonBlockingInputStream inputStream) {
NonBlockingInputStream inputStream) throws ParserException {
return WebmExtractor.this.onBinaryElement(
id, elementOffsetBytes, headerSizeBytes, contentsSizeBytes, inputStream);
}

View File

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer.smoothstreaming;
import com.google.android.exoplayer.BehindLiveWindowException;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.TrackInfo;
import com.google.android.exoplayer.chunk.Chunk;
@ -36,8 +37,10 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.Trac
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.ManifestFetcher;
import android.net.Uri;
import android.os.SystemClock;
import android.util.Base64;
import android.util.SparseArray;
@ -45,29 +48,68 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* An {@link ChunkSource} for SmoothStreaming.
*/
public class SmoothStreamingChunkSource implements ChunkSource {
private static final int MINIMUM_MANIFEST_REFRESH_PERIOD_MS = 5000;
private static final int INITIALIZATION_VECTOR_SIZE = 8;
private final String baseUrl;
private final StreamElement streamElement;
private final ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
private final int streamElementIndex;
private final TrackInfo trackInfo;
private final DataSource dataSource;
private final FormatEvaluator formatEvaluator;
private final Evaluation evaluation;
private final long liveEdgeLatencyUs;
private final int maxWidth;
private final int maxHeight;
private final SparseArray<FragmentedMp4Extractor> extractors;
private final Map<UUID, byte[]> psshInfo;
private final SmoothStreamingFormat[] formats;
private SmoothStreamingManifest currentManifest;
private int currentManifestChunkOffset;
private boolean finishedCurrentManifest;
private IOException fatalError;
/**
* @param baseUrl The base URL for the streams.
* Constructor to use for live streaming.
* <p>
* May also be used for fixed duration content, in which case the call is equivalent to calling
* the other constructor, passing {@code manifestFetcher.getManifest()} as the first argument.
*
* @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load.
* @param streamElementIndex The index of the stream element in the manifest to be provided by
* the source.
* @param trackIndices The indices of the tracks within the stream element to be considered by
* the source. May be null if all tracks within the element should be considered.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
* lag behind the "live edge" (i.e. the end of the most recently defined media in the
* manifest). Choosing a small value will minimize latency introduced by the player, however
* note that the value sets an upper bound on the length of media that the player can buffer.
* Hence a small value may increase the probability of rebuffering and playback failures.
*/
public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
int streamElementIndex, int[] trackIndices, DataSource dataSource,
FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) {
this(manifestFetcher, manifestFetcher.getManifest(), streamElementIndex, trackIndices,
dataSource, formatEvaluator, liveEdgeLatencyMs);
}
/**
* Constructor to use for fixed duration content.
*
* @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}.
* @param streamElementIndex The index of the stream element in the manifest to be provided by
* the source.
@ -76,22 +118,34 @@ public class SmoothStreamingChunkSource implements ChunkSource {
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats.
*/
public SmoothStreamingChunkSource(String baseUrl, SmoothStreamingManifest manifest,
int streamElementIndex, int[] trackIndices, DataSource dataSource,
FormatEvaluator formatEvaluator) {
this.baseUrl = baseUrl;
this.streamElement = manifest.streamElements[streamElementIndex];
this.trackInfo = new TrackInfo(streamElement.tracks[0].mimeType, manifest.getDurationUs());
public SmoothStreamingChunkSource(SmoothStreamingManifest manifest, int streamElementIndex,
int[] trackIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
this(null, manifest, streamElementIndex, trackIndices, dataSource, formatEvaluator, 0);
}
private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
SmoothStreamingManifest initialManifest, int streamElementIndex, int[] trackIndices,
DataSource dataSource, FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) {
this.manifestFetcher = manifestFetcher;
this.streamElementIndex = streamElementIndex;
this.currentManifest = initialManifest;
this.dataSource = dataSource;
this.formatEvaluator = formatEvaluator;
this.evaluation = new Evaluation();
this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000;
StreamElement streamElement = getElement(initialManifest);
trackInfo = new TrackInfo(streamElement.tracks[0].mimeType, initialManifest.durationUs);
evaluation = new Evaluation();
TrackEncryptionBox[] trackEncryptionBoxes = null;
ProtectionElement protectionElement = manifest.protectionElement;
ProtectionElement protectionElement = initialManifest.protectionElement;
if (protectionElement != null) {
byte[] keyId = getKeyId(protectionElement.data);
trackEncryptionBoxes = new TrackEncryptionBox[1];
trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId);
psshInfo = Collections.singletonMap(protectionElement.uuid, protectionElement.data);
} else {
psshInfo = null;
}
int trackCount = trackIndices != null ? trackIndices.length : streamElement.tracks.length;
@ -113,11 +167,8 @@ public class SmoothStreamingChunkSource implements ChunkSource {
: Track.TYPE_AUDIO;
FragmentedMp4Extractor extractor = new FragmentedMp4Extractor(
FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME);
extractor.setTrack(new Track(trackIndex, trackType, streamElement.timeScale, mediaFormat,
extractor.setTrack(new Track(trackIndex, trackType, streamElement.timescale, mediaFormat,
trackEncryptionBoxes));
if (protectionElement != null) {
extractor.putPsshInfo(protectionElement.uuid, protectionElement.data);
}
extractors.put(trackIndex, extractor);
}
this.maxHeight = maxHeight;
@ -139,22 +190,52 @@ public class SmoothStreamingChunkSource implements ChunkSource {
@Override
public void enable() {
// Do nothing.
fatalError = null;
if (manifestFetcher != null) {
manifestFetcher.enable();
}
}
@Override
public void disable(List<? extends MediaChunk> queue) {
// Do nothing.
if (manifestFetcher != null) {
manifestFetcher.disable();
}
}
@Override
public void continueBuffering(long playbackPositionUs) {
// Do nothing
if (manifestFetcher == null || !currentManifest.isLive || fatalError != null) {
return;
}
SmoothStreamingManifest newManifest = manifestFetcher.getManifest();
if (currentManifest != newManifest && newManifest != null) {
StreamElement currentElement = getElement(currentManifest);
StreamElement newElement = getElement(newManifest);
if (newElement.chunkCount == 0) {
currentManifestChunkOffset += currentElement.chunkCount;
} else if (currentElement.chunkCount > 0) {
currentManifestChunkOffset += currentElement.getChunkIndex(newElement.getStartTimeUs(0));
}
currentManifest = newManifest;
finishedCurrentManifest = false;
}
if (finishedCurrentManifest && (SystemClock.elapsedRealtime()
> manifestFetcher.getManifestLoadTimestamp() + MINIMUM_MANIFEST_REFRESH_PERIOD_MS)) {
manifestFetcher.requestRefresh();
}
}
@Override
public final void getChunkOperation(List<? extends MediaChunk> queue, long seekPositionUs,
long playbackPositionUs, ChunkOperationHolder out) {
if (fatalError != null) {
out.chunk = null;
return;
}
evaluation.queueSize = queue.size();
formatEvaluator.evaluate(queue, playbackPositionUs, formats, evaluation);
SmoothStreamingFormat selectedFormat = (SmoothStreamingFormat) evaluation.format;
@ -170,32 +251,63 @@ public class SmoothStreamingChunkSource implements ChunkSource {
return;
}
int nextChunkIndex;
if (queue.isEmpty()) {
nextChunkIndex = streamElement.getChunkIndex(seekPositionUs);
} else {
nextChunkIndex = queue.get(out.queueSize - 1).nextChunkIndex;
}
// In all cases where we return before instantiating a new chunk at the bottom of this method,
// we want out.chunk to be null.
out.chunk = null;
if (nextChunkIndex == -1) {
out.chunk = null;
StreamElement streamElement = getElement(currentManifest);
if (streamElement.chunkCount == 0) {
// The manifest is currently empty for this stream.
finishedCurrentManifest = true;
return;
}
boolean isLastChunk = nextChunkIndex == streamElement.chunkCount - 1;
String requestUrl = streamElement.buildRequestUrl(selectedFormat.trackIndex,
nextChunkIndex);
Uri uri = Uri.parse(baseUrl + '/' + requestUrl);
int chunkIndex;
if (queue.isEmpty()) {
if (currentManifest.isLive) {
seekPositionUs = getLiveSeekPosition();
}
chunkIndex = streamElement.getChunkIndex(seekPositionUs);
} else {
chunkIndex = queue.get(out.queueSize - 1).nextChunkIndex - currentManifestChunkOffset;
}
if (currentManifest.isLive) {
if (chunkIndex < 0) {
// This is before the first chunk in the current manifest.
fatalError = new BehindLiveWindowException();
return;
} else if (chunkIndex >= streamElement.chunkCount) {
// This is beyond the last chunk in the current manifest.
finishedCurrentManifest = true;
return;
} else if (chunkIndex == streamElement.chunkCount - 1) {
// This is the last chunk in the current manifest. Mark the manifest as being finished,
// but continue to return the final chunk.
finishedCurrentManifest = true;
}
} else if (chunkIndex == -1) {
// We've reached the end of the stream.
return;
}
boolean isLastChunk = !currentManifest.isLive && chunkIndex == streamElement.chunkCount - 1;
long chunkStartTimeUs = streamElement.getStartTimeUs(chunkIndex);
long nextChunkStartTimeUs = isLastChunk ? -1
: chunkStartTimeUs + streamElement.getChunkDurationUs(chunkIndex);
int currentAbsoluteChunkIndex = chunkIndex + currentManifestChunkOffset;
Uri uri = streamElement.buildRequestUri(selectedFormat.trackIndex, chunkIndex);
Chunk mediaChunk = newMediaChunk(selectedFormat, uri, null,
extractors.get(Integer.parseInt(selectedFormat.id)), dataSource, nextChunkIndex,
isLastChunk, streamElement.getStartTimeUs(nextChunkIndex),
isLastChunk ? -1 : streamElement.getStartTimeUs(nextChunkIndex + 1), 0);
extractors.get(Integer.parseInt(selectedFormat.id)), psshInfo, dataSource,
currentAbsoluteChunkIndex, isLastChunk, chunkStartTimeUs, nextChunkStartTimeUs, 0);
out.chunk = mediaChunk;
}
@Override
public IOException getError() {
return null;
return fatalError != null ? fatalError
: (manifestFetcher != null ? manifestFetcher.getError() : null);
}
@Override
@ -203,6 +315,30 @@ public class SmoothStreamingChunkSource implements ChunkSource {
// Do nothing.
}
/**
* For live playbacks, determines the seek position that snaps playback to be
* {@link #liveEdgeLatencyUs} behind the live edge of the current manifest.
*
* @return The seek position in microseconds.
*/
private long getLiveSeekPosition() {
  // Determine the live edge: the latest timestamp for which media is defined across all stream
  // elements, i.e. the start of each element's final chunk plus that chunk's duration.
  long liveEdgeUs = Long.MIN_VALUE;
  for (StreamElement element : currentManifest.streamElements) {
    int chunkCount = element.chunkCount;
    if (chunkCount > 0) {
      int lastChunkIndex = chunkCount - 1;
      long elementEdgeUs =
          element.getStartTimeUs(lastChunkIndex) + element.getChunkDurationUs(lastChunkIndex);
      if (elementEdgeUs > liveEdgeUs) {
        liveEdgeUs = elementEdgeUs;
      }
    }
  }
  // Snap playback to the configured latency behind the live edge.
  return liveEdgeUs - liveEdgeLatencyUs;
}
/**
 * Returns the stream element within {@code manifest} that this chunk source provides.
 */
private StreamElement getElement(SmoothStreamingManifest manifest) {
  StreamElement[] elements = manifest.streamElements;
  return elements[streamElementIndex];
}
private static MediaFormat getMediaFormat(StreamElement streamElement, int trackIndex) {
TrackElement trackElement = streamElement.tracks[trackIndex];
String mimeType = trackElement.mimeType;
@ -228,7 +364,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
}
private static MediaChunk newMediaChunk(Format formatInfo, Uri uri, String cacheKey,
Extractor extractor, DataSource dataSource, int chunkIndex,
Extractor extractor, Map<UUID, byte[]> psshInfo, DataSource dataSource, int chunkIndex,
boolean isLast, long chunkStartTimeUs, long nextChunkStartTimeUs, int trigger) {
int nextChunkIndex = isLast ? -1 : chunkIndex + 1;
long nextStartTimeUs = isLast ? -1 : nextChunkStartTimeUs;
@ -237,7 +373,7 @@ public class SmoothStreamingChunkSource implements ChunkSource {
// In SmoothStreaming each chunk contains sample timestamps relative to the start of the chunk.
// To convert them the absolute timestamps, we need to set sampleOffsetUs to -chunkStartTimeUs.
return new Mp4MediaChunk(dataSource, dataSpec, formatInfo, trigger, chunkStartTimeUs,
nextStartTimeUs, nextChunkIndex, extractor, false, -chunkStartTimeUs);
nextStartTimeUs, nextChunkIndex, extractor, psshInfo, false, -chunkStartTimeUs);
}
private static byte[] getKeyId(byte[] initData) {

View File

@ -15,9 +15,13 @@
*/
package com.google.android.exoplayer.smoothstreaming;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
import android.net.Uri;
import java.util.List;
import java.util.UUID;
/**
@ -28,34 +32,77 @@ import java.util.UUID;
*/
public class SmoothStreamingManifest {
/**
* The client manifest major version.
*/
public final int majorVersion;
public final int minorVersion;
public final long timeScale;
public final int lookAheadCount;
public final ProtectionElement protectionElement;
public final StreamElement[] streamElements;
private final long duration;
public SmoothStreamingManifest(int majorVersion, int minorVersion, long timeScale, long duration,
int lookAheadCount, ProtectionElement protectionElement, StreamElement[] streamElements) {
this.majorVersion = majorVersion;
this.minorVersion = minorVersion;
this.timeScale = timeScale;
this.duration = duration;
this.lookAheadCount = lookAheadCount;
this.protectionElement = protectionElement;
this.streamElements = streamElements;
}
/**
* Gets the duration of the media.
*
*
* @return The duration of the media, in microseconds.
* The client manifest minor version.
*/
public long getDurationUs() {
return (duration * 1000000L) / timeScale;
public final int minorVersion;
/**
* The number of fragments in a lookahead, or -1 if the lookahead is unspecified.
*/
public final int lookAheadCount;
/**
* True if the manifest describes a live presentation still in progress. False otherwise.
*/
public final boolean isLive;
/**
* Content protection information, or null if the content is not protected.
*/
public final ProtectionElement protectionElement;
/**
* The contained stream elements.
*/
public final StreamElement[] streamElements;
/**
* The overall presentation duration of the media in microseconds, or {@link C#UNKNOWN_TIME_US}
* if the duration is unknown.
*/
public final long durationUs;
/**
* The length of the trailing window for a live broadcast in microseconds, or
* {@link C#UNKNOWN_TIME_US} if the stream is not live or if the window length is unspecified.
*/
public final long dvrWindowLengthUs;
/**
* @param majorVersion The client manifest major version.
* @param minorVersion The client manifest minor version.
* @param timescale The timescale of the media as the number of units that pass in one second.
* @param duration The overall presentation duration in units of the timescale attribute, or 0
* if the duration is unknown.
* @param dvrWindowLength The length of the trailing window in units of the timescale attribute,
* or 0 if this attribute is unspecified or not applicable.
* @param lookAheadCount The number of fragments in a lookahead, or -1 if this attribute is
* unspecified or not applicable.
* @param isLive True if the manifest describes a live presentation still in progress. False
* otherwise.
* @param protectionElement Content protection information, or null if the content is not
* protected.
* @param streamElements The contained stream elements.
*/
public SmoothStreamingManifest(int majorVersion, int minorVersion, long timescale, long duration,
long dvrWindowLength, int lookAheadCount, boolean isLive, ProtectionElement protectionElement,
StreamElement[] streamElements) {
this.majorVersion = majorVersion;
this.minorVersion = minorVersion;
this.lookAheadCount = lookAheadCount;
this.isLive = isLive;
this.protectionElement = protectionElement;
this.streamElements = streamElements;
dvrWindowLengthUs = dvrWindowLength == 0 ? C.UNKNOWN_TIME_US
: Util.scaleLargeTimestamp(dvrWindowLength, C.MICROS_PER_SECOND, timescale);
durationUs = duration == 0 ? C.UNKNOWN_TIME_US
: Util.scaleLargeTimestamp(duration, C.MICROS_PER_SECOND, timescale);
}
/**
@ -83,12 +130,9 @@ public class SmoothStreamingManifest {
public final int bitrate;
// Audio-video
public final String fourCC;
public final byte[][] csd;
public final int profile;
public final int level;
// Audio-video (derived)
public final String mimeType;
// Video-only
@ -105,12 +149,12 @@ public class SmoothStreamingManifest {
public final int nalUnitLengthField;
public final String content;
public TrackElement(int index, int bitrate, String fourCC, byte[][] csd, int profile, int level,
int maxWidth, int maxHeight, int sampleRate, int channels, int packetSize, int audioTag,
int bitPerSample, int nalUnitLengthField, String content) {
public TrackElement(int index, int bitrate, String mimeType, byte[][] csd, int profile,
int level, int maxWidth, int maxHeight, int sampleRate, int channels, int packetSize,
int audioTag, int bitPerSample, int nalUnitLengthField, String content) {
this.index = index;
this.bitrate = bitrate;
this.fourCC = fourCC;
this.mimeType = mimeType;
this.csd = csd;
this.profile = profile;
this.level = level;
@ -123,19 +167,6 @@ public class SmoothStreamingManifest {
this.bitPerSample = bitPerSample;
this.nalUnitLengthField = nalUnitLengthField;
this.content = content;
this.mimeType = fourCCToMimeType(fourCC);
}
private static String fourCCToMimeType(String fourCC) {
if (fourCC.equalsIgnoreCase("H264") || fourCC.equalsIgnoreCase("AVC1")
|| fourCC.equalsIgnoreCase("DAVC")) {
return MimeTypes.VIDEO_H264;
} else if (fourCC.equalsIgnoreCase("AACL") || fourCC.equalsIgnoreCase("AACH")) {
return MimeTypes.AUDIO_AAC;
} else if (fourCC.equalsIgnoreCase("TTML")) {
return MimeTypes.APPLICATION_TTML;
}
return null;
}
}
@ -155,10 +186,9 @@ public class SmoothStreamingManifest {
public final int type;
public final String subType;
public final long timeScale;
public final long timescale;
public final String name;
public final int qualityLevels;
public final String url;
public final int maxWidth;
public final int maxHeight;
public final int displayWidth;
@ -167,25 +197,36 @@ public class SmoothStreamingManifest {
public final TrackElement[] tracks;
public final int chunkCount;
private final long[] chunkStartTimes;
private final Uri baseUri;
private final String chunkTemplate;
public StreamElement(int type, String subType, long timeScale, String name,
int qualityLevels, String url, int maxWidth, int maxHeight, int displayWidth,
int displayHeight, String language, TrackElement[] tracks, long[] chunkStartTimes) {
private final List<Long> chunkStartTimes;
private final long[] chunkStartTimesUs;
private final long lastChunkDurationUs;
public StreamElement(Uri baseUri, String chunkTemplate, int type, String subType,
long timescale, String name, int qualityLevels, int maxWidth, int maxHeight,
int displayWidth, int displayHeight, String language, TrackElement[] tracks,
List<Long> chunkStartTimes, long lastChunkDuration) {
this.baseUri = baseUri;
this.chunkTemplate = chunkTemplate;
this.type = type;
this.subType = subType;
this.timeScale = timeScale;
this.timescale = timescale;
this.name = name;
this.qualityLevels = qualityLevels;
this.url = url;
this.maxWidth = maxWidth;
this.maxHeight = maxHeight;
this.displayWidth = displayWidth;
this.displayHeight = displayHeight;
this.language = language;
this.tracks = tracks;
this.chunkCount = chunkStartTimes.length;
this.chunkCount = chunkStartTimes.size();
this.chunkStartTimes = chunkStartTimes;
lastChunkDurationUs =
Util.scaleLargeTimestamp(lastChunkDuration, C.MICROS_PER_SECOND, timescale);
chunkStartTimesUs =
Util.scaleLargeTimestamps(chunkStartTimes, C.MICROS_PER_SECOND, timescale);
}
/**
@ -195,7 +236,7 @@ public class SmoothStreamingManifest {
* @return The index of the corresponding chunk.
*/
public int getChunkIndex(long timeUs) {
return Util.binarySearchFloor(chunkStartTimes, (timeUs * timeScale) / 1000000L, true, true);
return Util.binarySearchFloor(chunkStartTimesUs, timeUs, true, true);
}
/**
@ -205,22 +246,35 @@ public class SmoothStreamingManifest {
* @return The start time of the chunk, in microseconds.
*/
public long getStartTimeUs(int chunkIndex) {
return (chunkStartTimes[chunkIndex] * 1000000L) / timeScale;
return chunkStartTimesUs[chunkIndex];
}
/**
* Builds a URL for requesting the specified chunk of the specified track.
* Gets the duration of the specified chunk.
*
* @param chunkIndex The index of the chunk.
* @return The duration of the chunk, in microseconds.
*/
public long getChunkDurationUs(int chunkIndex) {
  if (chunkIndex == chunkCount - 1) {
    // The duration of the final chunk is held explicitly, since there is no following chunk
    // whose start time could be used to derive it.
    return lastChunkDurationUs;
  }
  // For every other chunk, the duration is the gap to the next chunk's start time.
  return chunkStartTimesUs[chunkIndex + 1] - chunkStartTimesUs[chunkIndex];
}
/**
* Builds a uri for requesting the specified chunk of the specified track.
*
* @param track The index of the track for which to build the URL.
* @param chunkIndex The index of the chunk for which to build the URL.
* @return The request URL.
* @return The request uri.
*/
public String buildRequestUrl(int track, int chunkIndex) {
assert (tracks != null);
assert (chunkStartTimes != null);
assert (chunkIndex < chunkStartTimes.length);
return url.replace(URL_PLACEHOLDER_BITRATE, Integer.toString(tracks[track].bitrate))
.replace(URL_PLACEHOLDER_START_TIME, Long.toString(chunkStartTimes[chunkIndex]));
public Uri buildRequestUri(int track, int chunkIndex) {
Assertions.checkState(tracks != null);
Assertions.checkState(chunkStartTimes != null);
Assertions.checkState(chunkIndex < chunkStartTimes.size());
String chunkUrl = chunkTemplate
.replace(URL_PLACEHOLDER_BITRATE, Integer.toString(tracks[track].bitrate))
.replace(URL_PLACEHOLDER_START_TIME, Long.toString(chunkStartTimes.get(chunkIndex)));
return Util.getMergedUri(baseUri, chunkUrl);
}
}

View File

@ -1,63 +0,0 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.smoothstreaming;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.util.ManifestFetcher;
import android.net.Uri;
import java.io.IOException;
import java.io.InputStream;
/**
* A concrete implementation of {@link ManifestFetcher} for loading SmoothStreaming
* manifests.
* <p>
* This class is provided for convenience, however it is expected that most applications will
* contain their own mechanisms for making asynchronous network requests and parsing the response.
* In such cases it is recommended that application developers use their existing solution rather
* than this one.
*/
public final class SmoothStreamingManifestFetcher extends ManifestFetcher<SmoothStreamingManifest> {

  // A single parser instance is created per fetcher and reused for every parse call.
  private final SmoothStreamingManifestParser parser;

  /**
   * @param callback The callback to provide with the parsed manifest (or error).
   */
  public SmoothStreamingManifestFetcher(ManifestCallback<SmoothStreamingManifest> callback) {
    super(callback);
    parser = new SmoothStreamingManifestParser();
  }

  /**
   * @param callback The callback to provide with the parsed manifest (or error).
   * @param timeoutMillis The timeout in milliseconds for the connection used to load the data.
   */
  public SmoothStreamingManifestFetcher(ManifestCallback<SmoothStreamingManifest> callback,
      int timeoutMillis) {
    super(callback, timeoutMillis);
    parser = new SmoothStreamingManifestParser();
  }

  /**
   * Parses a SmoothStreaming manifest from the stream. Note that the {@code contentId} and
   * {@code baseUrl} arguments are not used by this implementation.
   *
   * @param stream The stream from which to parse the manifest.
   * @param inputEncoding The encoding of the input.
   * @return The parsed manifest.
   * @throws IOException If a problem occurred reading from the stream.
   * @throws ParserException If a problem occurred parsing the data as a manifest.
   */
  @Override
  protected SmoothStreamingManifest parse(InputStream stream, String inputEncoding,
      String contentId, Uri baseUrl) throws IOException, ParserException {
    return parser.parse(stream, inputEncoding);
  }
}

View File

@ -21,7 +21,10 @@ import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.Stre
import com.google.android.exoplayer.smoothstreaming.SmoothStreamingManifest.TrackElement;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.ManifestParser;
import com.google.android.exoplayer.util.MimeTypes;
import android.net.Uri;
import android.util.Base64;
import android.util.Pair;
@ -31,6 +34,7 @@ import org.xmlpull.v1.XmlPullParserFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
@ -41,7 +45,7 @@ import java.util.UUID;
* @see <a href="http://msdn.microsoft.com/en-us/library/ee673436(v=vs.90).aspx">
* IIS Smooth Streaming Client Manifest Format</a>
*/
public class SmoothStreamingManifestParser {
public class SmoothStreamingManifestParser implements ManifestParser<SmoothStreamingManifest> {
private final XmlPullParserFactory xmlParserFactory;
@ -53,21 +57,13 @@ public class SmoothStreamingManifestParser {
}
}
/**
* Parses a manifest from the provided {@link InputStream}.
*
* @param inputStream The stream from which to parse the manifest.
* @param inputEncoding The encoding of the input.
* @return The parsed manifest.
* @throws IOException If a problem occurred reading from the stream.
* @throws ParserException If a problem occurred parsing the xml as a smooth streaming manifest.
*/
public SmoothStreamingManifest parse(InputStream inputStream, String inputEncoding) throws
IOException, ParserException {
@Override
public SmoothStreamingManifest parse(InputStream inputStream, String inputEncoding,
String contentId, Uri baseUri) throws IOException, ParserException {
try {
XmlPullParser xmlParser = xmlParserFactory.newPullParser();
xmlParser.setInput(inputStream, inputEncoding);
SmoothStreamMediaParser smoothStreamMediaParser = new SmoothStreamMediaParser(null);
SmoothStreamMediaParser smoothStreamMediaParser = new SmoothStreamMediaParser(null, baseUri);
return (SmoothStreamingManifest) smoothStreamMediaParser.parse(xmlParser);
} catch (XmlPullParserException e) {
throw new ParserException(e);
@ -90,14 +86,16 @@ public class SmoothStreamingManifestParser {
*/
private static abstract class ElementParser {
private final Uri baseUri;
private final String tag;
private final ElementParser parent;
private final List<Pair<String, Object>> normalizedAttributes;
public ElementParser(String tag, ElementParser parent) {
this.tag = tag;
public ElementParser(ElementParser parent, Uri baseUri, String tag) {
this.parent = parent;
this.baseUri = baseUri;
this.tag = tag;
this.normalizedAttributes = new LinkedList<Pair<String, Object>>();
}
@ -120,7 +118,7 @@ public class SmoothStreamingManifestParser {
} else if (handleChildInline(tagName)) {
parseStartTag(xmlParser);
} else {
ElementParser childElementParser = newChildParser(this, tagName);
ElementParser childElementParser = newChildParser(this, tagName, baseUri);
if (childElementParser == null) {
skippingElementDepth = 1;
} else {
@ -157,13 +155,13 @@ public class SmoothStreamingManifestParser {
}
}
private ElementParser newChildParser(ElementParser parent, String name) {
private ElementParser newChildParser(ElementParser parent, String name, Uri baseUri) {
if (TrackElementParser.TAG.equals(name)) {
return new TrackElementParser(parent);
return new TrackElementParser(parent, baseUri);
} else if (ProtectionElementParser.TAG.equals(name)) {
return new ProtectionElementParser(parent);
return new ProtectionElementParser(parent, baseUri);
} else if (StreamElementParser.TAG.equals(name)) {
return new StreamElementParser(parent);
return new StreamElementParser(parent, baseUri);
}
return null;
}
@ -308,6 +306,15 @@ public class SmoothStreamingManifestParser {
}
}
/**
 * Parses a boolean attribute of the element being processed by the provided parser.
 *
 * @param parser The parser positioned on the element whose attribute should be read.
 * @param key The name of the attribute.
 * @param defaultValue The value to return when the attribute is absent.
 * @return The parsed attribute value, or {@code defaultValue} if the attribute is not present.
 */
protected final boolean parseBoolean(XmlPullParser parser, String key, boolean defaultValue) {
  String attributeValue = parser.getAttributeValue(null, key);
  return attributeValue == null ? defaultValue : Boolean.parseBoolean(attributeValue);
}
}
private static class SmoothStreamMediaParser extends ElementParser {
@ -317,19 +324,23 @@ public class SmoothStreamingManifestParser {
private static final String KEY_MAJOR_VERSION = "MajorVersion";
private static final String KEY_MINOR_VERSION = "MinorVersion";
private static final String KEY_TIME_SCALE = "TimeScale";
private static final String KEY_DVR_WINDOW_LENGTH = "DVRWindowLength";
private static final String KEY_DURATION = "Duration";
private static final String KEY_LOOKAHEAD_COUNT = "LookaheadCount";
private static final String KEY_IS_LIVE = "IsLive";
private int majorVersion;
private int minorVersion;
private long timeScale;
private long timescale;
private long duration;
private long dvrWindowLength;
private int lookAheadCount;
private boolean isLive;
private ProtectionElement protectionElement;
private List<StreamElement> streamElements;
public SmoothStreamMediaParser(ElementParser parent) {
super(TAG, parent);
public SmoothStreamMediaParser(ElementParser parent, Uri baseUri) {
super(parent, baseUri, TAG);
lookAheadCount = -1;
protectionElement = null;
streamElements = new LinkedList<StreamElement>();
@ -339,10 +350,12 @@ public class SmoothStreamingManifestParser {
public void parseStartTag(XmlPullParser parser) throws ParserException {
majorVersion = parseRequiredInt(parser, KEY_MAJOR_VERSION);
minorVersion = parseRequiredInt(parser, KEY_MINOR_VERSION);
timeScale = parseLong(parser, KEY_TIME_SCALE, 10000000L);
timescale = parseLong(parser, KEY_TIME_SCALE, 10000000L);
duration = parseRequiredLong(parser, KEY_DURATION);
dvrWindowLength = parseLong(parser, KEY_DVR_WINDOW_LENGTH, 0);
lookAheadCount = parseInt(parser, KEY_LOOKAHEAD_COUNT, -1);
putNormalizedAttribute(KEY_TIME_SCALE, timeScale);
isLive = parseBoolean(parser, KEY_IS_LIVE, false);
putNormalizedAttribute(KEY_TIME_SCALE, timescale);
}
@Override
@ -359,8 +372,8 @@ public class SmoothStreamingManifestParser {
public Object build() {
StreamElement[] streamElementArray = new StreamElement[streamElements.size()];
streamElements.toArray(streamElementArray);
return new SmoothStreamingManifest(majorVersion, minorVersion, timeScale, duration,
lookAheadCount, protectionElement, streamElementArray);
return new SmoothStreamingManifest(majorVersion, minorVersion, timescale, duration,
dvrWindowLength, lookAheadCount, isLive, protectionElement, streamElementArray);
}
}
@ -376,8 +389,8 @@ public class SmoothStreamingManifestParser {
private UUID uuid;
private byte[] initData;
public ProtectionElementParser(ElementParser parent) {
super(TAG, parent);
public ProtectionElementParser(ElementParser parent, Uri baseUri) {
super(parent, baseUri, TAG);
}
@Override
@ -426,7 +439,6 @@ public class SmoothStreamingManifestParser {
private static final String KEY_TYPE_TEXT = "text";
private static final String KEY_SUB_TYPE = "Subtype";
private static final String KEY_NAME = "Name";
private static final String KEY_CHUNKS = "Chunks";
private static final String KEY_QUALITY_LEVELS = "QualityLevels";
private static final String KEY_URL = "Url";
private static final String KEY_MAX_WIDTH = "MaxWidth";
@ -438,12 +450,14 @@ public class SmoothStreamingManifestParser {
private static final String KEY_FRAGMENT_DURATION = "d";
private static final String KEY_FRAGMENT_START_TIME = "t";
private static final String KEY_FRAGMENT_REPEAT_COUNT = "r";
private final Uri baseUri;
private final List<TrackElement> tracks;
private int type;
private String subType;
private long timeScale;
private long timescale;
private String name;
private int qualityLevels;
private String url;
@ -452,13 +466,13 @@ public class SmoothStreamingManifestParser {
private int displayWidth;
private int displayHeight;
private String language;
private long[] startTimes;
private ArrayList<Long> startTimes;
private int chunkIndex;
private long previousChunkDuration;
private long lastChunkDuration;
public StreamElementParser(ElementParser parent) {
super(TAG, parent);
public StreamElementParser(ElementParser parent, Uri baseUri) {
super(parent, baseUri, TAG);
this.baseUri = baseUri;
tracks = new LinkedList<TrackElement>();
}
@ -477,21 +491,32 @@ public class SmoothStreamingManifestParser {
}
private void parseStreamFragmentStartTag(XmlPullParser parser) throws ParserException {
startTimes[chunkIndex] = parseLong(parser, KEY_FRAGMENT_START_TIME, -1L);
if (startTimes[chunkIndex] == -1L) {
int chunkIndex = startTimes.size();
long startTime = parseLong(parser, KEY_FRAGMENT_START_TIME, -1L);
if (startTime == -1L) {
if (chunkIndex == 0) {
// Assume the track starts at t = 0.
startTimes[chunkIndex] = 0;
} else if (previousChunkDuration != -1L) {
startTime = 0;
} else if (lastChunkDuration != -1L) {
// Infer the start time from the previous chunk's start time and duration.
startTimes[chunkIndex] = startTimes[chunkIndex - 1] + previousChunkDuration;
startTime = startTimes.get(chunkIndex - 1) + lastChunkDuration;
} else {
// We don't have the start time, and we're unable to infer it.
throw new ParserException("Unable to infer start time");
}
}
previousChunkDuration = parseLong(parser, KEY_FRAGMENT_DURATION, -1L);
chunkIndex++;
startTimes.add(startTime);
lastChunkDuration = parseLong(parser, KEY_FRAGMENT_DURATION, -1L);
// Handle repeated chunks.
long repeatCount = parseLong(parser, KEY_FRAGMENT_REPEAT_COUNT, 1L);
if (repeatCount > 1 && lastChunkDuration == -1L) {
throw new ParserException("Repeated chunk with unspecified duration");
}
for (int i = 1; i < repeatCount; i++) {
chunkIndex++;
startTimes.add(startTime + (lastChunkDuration * i));
}
}
private void parseStreamElementStartTag(XmlPullParser parser) throws ParserException {
@ -510,11 +535,11 @@ public class SmoothStreamingManifestParser {
displayWidth = parseInt(parser, KEY_DISPLAY_WIDTH, -1);
displayHeight = parseInt(parser, KEY_DISPLAY_HEIGHT, -1);
language = parser.getAttributeValue(null, KEY_LANGUAGE);
timeScale = parseInt(parser, KEY_TIME_SCALE, -1);
if (timeScale == -1) {
timeScale = (Long) getNormalizedAttribute(KEY_TIME_SCALE);
timescale = parseInt(parser, KEY_TIME_SCALE, -1);
if (timescale == -1) {
timescale = (Long) getNormalizedAttribute(KEY_TIME_SCALE);
}
startTimes = new long[parseRequiredInt(parser, KEY_CHUNKS)];
startTimes = new ArrayList<Long>();
}
private int parseType(XmlPullParser parser) throws ParserException {
@ -544,8 +569,9 @@ public class SmoothStreamingManifestParser {
public Object build() {
TrackElement[] trackElements = new TrackElement[tracks.size()];
tracks.toArray(trackElements);
return new StreamElement(type, subType, timeScale, name, qualityLevels, url, maxWidth,
maxHeight, displayWidth, displayHeight, language, trackElements, startTimes);
return new StreamElement(baseUri, url, type, subType, timescale, name, qualityLevels,
maxWidth, maxHeight, displayWidth, displayHeight, language, trackElements, startTimes,
lastChunkDuration);
}
}
@ -572,7 +598,7 @@ public class SmoothStreamingManifestParser {
private int index;
private int bitrate;
private String fourCC;
private String mimeType;
private int profile;
private int level;
private int maxWidth;
@ -586,8 +612,8 @@ public class SmoothStreamingManifestParser {
private int nalUnitLengthField;
private String content;
public TrackElementParser(ElementParser parent) {
super(TAG, parent);
public TrackElementParser(ElementParser parent, Uri baseUri) {
super(parent, baseUri, TAG);
this.csd = new LinkedList<byte[]>();
}
@ -604,11 +630,14 @@ public class SmoothStreamingManifestParser {
if (type == StreamElement.TYPE_VIDEO) {
maxHeight = parseRequiredInt(parser, KEY_MAX_HEIGHT);
maxWidth = parseRequiredInt(parser, KEY_MAX_WIDTH);
fourCC = parseRequiredString(parser, KEY_FOUR_CC);
mimeType = fourCCToMimeType(parseRequiredString(parser, KEY_FOUR_CC));
} else {
maxHeight = -1;
maxWidth = -1;
fourCC = parser.getAttributeValue(null, KEY_FOUR_CC);
String fourCC = parser.getAttributeValue(null, KEY_FOUR_CC);
// If fourCC is missing and the stream type is audio, we assume AAC.
mimeType = fourCC != null ? fourCCToMimeType(fourCC)
: type == StreamElement.TYPE_AUDIO ? MimeTypes.AUDIO_AAC : null;
}
if (type == StreamElement.TYPE_AUDIO) {
@ -644,17 +673,6 @@ public class SmoothStreamingManifestParser {
}
}
private byte[] hexStringToByteArray(String hexString) {
int length = hexString.length();
byte[] data = new byte[length / 2];
for (int i = 0; i < data.length; i++) {
int stringOffset = i * 2;
data[i] = (byte) ((Character.digit(hexString.charAt(stringOffset), 16) << 4)
+ Character.digit(hexString.charAt(stringOffset + 1), 16));
}
return data;
}
@Override
public void parseText(XmlPullParser parser) {
content = parser.getText();
@ -667,8 +685,33 @@ public class SmoothStreamingManifestParser {
csdArray = new byte[csd.size()][];
csd.toArray(csdArray);
}
return new TrackElement(index, bitrate, fourCC, csdArray, profile, level, maxWidth, maxHeight,
samplingRate, channels, packetSize, audioTag, bitPerSample, nalUnitLengthField, content);
return new TrackElement(index, bitrate, mimeType, csdArray, profile, level, maxWidth,
maxHeight, samplingRate, channels, packetSize, audioTag, bitPerSample, nalUnitLengthField,
content);
}
/**
 * Maps a SmoothStreaming FourCC codec identifier to the corresponding MIME type.
 *
 * @param fourCC The FourCC string read from the manifest.
 * @return The corresponding MIME type, or null if the FourCC is not recognized.
 */
private static String fourCCToMimeType(String fourCC) {
  // Manifests are inconsistent about FourCC casing, so all comparisons are case-insensitive.
  for (String h264Code : new String[] {"H264", "X264", "AVC1", "DAVC"}) {
    if (h264Code.equalsIgnoreCase(fourCC)) {
      return MimeTypes.VIDEO_H264;
    }
  }
  for (String aacCode : new String[] {"AAC", "AACL", "AACH", "AACP"}) {
    if (aacCode.equalsIgnoreCase(fourCC)) {
      return MimeTypes.AUDIO_AAC;
    }
  }
  if ("TTML".equalsIgnoreCase(fourCC)) {
    return MimeTypes.APPLICATION_TTML;
  }
  return null;
}
/**
 * Converts a hexadecimal string into the byte array it encodes.
 * <p>
 * The string is assumed to consist of valid hexadecimal digit pairs; a trailing unpaired digit
 * is silently ignored, and invalid characters are not detected.
 *
 * @param hexString The hexadecimal string to convert.
 * @return The decoded bytes.
 */
private static byte[] hexStringToByteArray(String hexString) {
  byte[] bytes = new byte[hexString.length() / 2];
  for (int byteIndex = 0; byteIndex < bytes.length; byteIndex++) {
    int highNibble = Character.digit(hexString.charAt(2 * byteIndex), 16);
    int lowNibble = Character.digit(hexString.charAt(2 * byteIndex + 1), 16);
    bytes[byteIndex] = (byte) ((highNibble << 4) + lowNibble);
  }
  return bytes;
}
}

View File

@ -0,0 +1,160 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.text;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.graphics.Color;
import android.graphics.Typeface;
import android.view.accessibility.CaptioningManager;
import android.view.accessibility.CaptioningManager.CaptionStyle;
/**
* A compatibility wrapper for {@link CaptionStyle}.
*/
public final class CaptionStyleCompat {

  /** Edge type value specifying no character edges. */
  public static final int EDGE_TYPE_NONE = 0;

  /** Edge type value specifying uniformly outlined character edges. */
  public static final int EDGE_TYPE_OUTLINE = 1;

  /** Edge type value specifying drop-shadowed character edges. */
  public static final int EDGE_TYPE_DROP_SHADOW = 2;

  /** Edge type value specifying raised bevel character edges. */
  public static final int EDGE_TYPE_RAISED = 3;

  /** Edge type value specifying depressed bevel character edges. */
  public static final int EDGE_TYPE_DEPRESSED = 4;

  /** Use color setting specified by the track and fallback to default caption style. */
  public static final int USE_TRACK_COLOR_SETTINGS = 1;

  /** Default caption style: white text on black, no edges, transparent window. */
  public static final CaptionStyleCompat DEFAULT = new CaptionStyleCompat(
      Color.WHITE, Color.BLACK, Color.TRANSPARENT, EDGE_TYPE_NONE, Color.WHITE, null);

  /** The preferred foreground color. */
  public final int foregroundColor;

  /** The preferred background color. */
  public final int backgroundColor;

  /** The preferred window color. */
  public final int windowColor;

  /**
   * The preferred edge type. One of {@link #EDGE_TYPE_NONE}, {@link #EDGE_TYPE_OUTLINE},
   * {@link #EDGE_TYPE_DROP_SHADOW}, {@link #EDGE_TYPE_RAISED} or {@link #EDGE_TYPE_DEPRESSED}.
   */
  public final int edgeType;

  /** The preferred edge color, if using an edge type other than {@link #EDGE_TYPE_NONE}. */
  public final int edgeColor;

  /** The preferred typeface. */
  public final Typeface typeface;

  /**
   * Creates a {@link CaptionStyleCompat} equivalent to a provided {@link CaptionStyle}.
   *
   * @param captionStyle A {@link CaptionStyle}.
   * @return The equivalent {@link CaptionStyleCompat}.
   */
  @TargetApi(19)
  public static CaptionStyleCompat createFromCaptionStyle(
      CaptioningManager.CaptionStyle captionStyle) {
    // CaptionStyle only exists from API level 19, so any caller is necessarily on >= 19. We
    // only need to distinguish the V21 variant, which adds the optional has*() accessors.
    return Util.SDK_INT >= 21
        ? createFromCaptionStyleV21(captionStyle)
        : createFromCaptionStyleV19(captionStyle);
  }

  /**
   * @param foregroundColor See {@link #foregroundColor}.
   * @param backgroundColor See {@link #backgroundColor}.
   * @param windowColor See {@link #windowColor}.
   * @param edgeType See {@link #edgeType}.
   * @param edgeColor See {@link #edgeColor}.
   * @param typeface See {@link #typeface}.
   */
  public CaptionStyleCompat(int foregroundColor, int backgroundColor, int windowColor, int edgeType,
      int edgeColor, Typeface typeface) {
    this.typeface = typeface;
    this.edgeColor = edgeColor;
    this.edgeType = edgeType;
    this.windowColor = windowColor;
    this.backgroundColor = backgroundColor;
    this.foregroundColor = foregroundColor;
  }

  @TargetApi(19)
  private static CaptionStyleCompat createFromCaptionStyleV19(
      CaptioningManager.CaptionStyle captionStyle) {
    // Styles prior to API 21 always carry concrete values and have no window color concept.
    return new CaptionStyleCompat(
        captionStyle.foregroundColor, captionStyle.backgroundColor, Color.TRANSPARENT,
        captionStyle.edgeType, captionStyle.edgeColor, captionStyle.getTypeface());
  }

  @TargetApi(21)
  private static CaptionStyleCompat createFromCaptionStyleV21(
      CaptioningManager.CaptionStyle captionStyle) {
    // From API 21 each attribute may be unset, so fall back to DEFAULT attribute by attribute.
    int foreground =
        captionStyle.hasForegroundColor() ? captionStyle.foregroundColor : DEFAULT.foregroundColor;
    int background =
        captionStyle.hasBackgroundColor() ? captionStyle.backgroundColor : DEFAULT.backgroundColor;
    int window = captionStyle.hasWindowColor() ? captionStyle.windowColor : DEFAULT.windowColor;
    int edgeType = captionStyle.hasEdgeType() ? captionStyle.edgeType : DEFAULT.edgeType;
    int edgeColor = captionStyle.hasEdgeColor() ? captionStyle.edgeColor : DEFAULT.edgeColor;
    return new CaptionStyleCompat(
        foreground, background, window, edgeType, edgeColor, captionStyle.getTypeface());
  }
}

View File

@ -0,0 +1,147 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.text;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.util.Assertions;
import android.media.MediaCodec;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* Wraps a {@link SubtitleParser}, exposing an interface similar to {@link MediaCodec} for
* asynchronous parsing of subtitles.
*/
public class SubtitleParserHelper implements Handler.Callback {

  private final SubtitleParser parser;
  private final Handler handler;

  // The fields below are read and written from both the calling thread and the parsing thread,
  // and are guarded by synchronizing on this.
  private SampleHolder sampleHolder;
  private boolean parsing;
  private Subtitle result;
  private IOException error;

  /**
   * @param looper The {@link Looper} associated with the thread on which parsing should occur.
   * @param parser The parser that should be used to parse the raw data.
   */
  public SubtitleParserHelper(Looper looper, SubtitleParser parser) {
    this.handler = new Handler(looper, this);
    this.parser = parser;
    flush();
  }

  /**
   * Flushes the helper, canceling the current parsing operation, if there is one.
   */
  public synchronized void flush() {
    // Replacing the holder invalidates any in-flight parse operation: handleMessage detects that
    // its holder is no longer the current one and discards the result.
    sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
    parsing = false;
    result = null;
    error = null;
  }

  /**
   * Whether the helper is currently performing a parsing operation.
   *
   * @return True if the helper is currently performing a parsing operation. False otherwise.
   */
  public synchronized boolean isParsing() {
    return parsing;
  }

  /**
   * Gets the holder that should be populated with data to be parsed.
   * <p>
   * The returned holder will remain valid unless {@link #flush()} is called. If {@link #flush()}
   * is called the holder is replaced, and this method should be called again to obtain the new
   * holder.
   *
   * @return The holder that should be populated with data to be parsed.
   */
  public synchronized SampleHolder getSampleHolder() {
    return sampleHolder;
  }

  /**
   * Start a parsing operation.
   * <p>
   * The holder returned by {@link #getSampleHolder()} should be populated with the data to be
   * parsed prior to calling this method.
   */
  public synchronized void startParseOperation() {
    Assertions.checkState(!parsing);
    parsing = true;
    result = null;
    error = null;
    // Hand the current holder to the parsing thread; the actual work happens in handleMessage.
    handler.obtainMessage(0, sampleHolder).sendToTarget();
  }

  /**
   * Gets the result of the most recent parsing operation.
   * <p>
   * The result is cleared as a result of calling this method, and so subsequent calls will return
   * null until a subsequent parsing operation has finished.
   *
   * @return The result of the parsing operation, or null.
   * @throws IOException If the parsing operation failed.
   */
  public synchronized Subtitle getAndClearResult() throws IOException {
    try {
      if (error != null) {
        throw error;
      }
      return result;
    } finally {
      // Clear state whether the result is returned or the error is thrown.
      error = null;
      result = null;
    }
  }

  @Override
  public boolean handleMessage(Message msg) {
    Subtitle result;
    IOException error;
    SampleHolder holder = (SampleHolder) msg.obj;
    try {
      InputStream inputStream = new ByteArrayInputStream(holder.data.array(), 0, holder.size);
      // Use the timestamp from the holder posted with this message, not from the sampleHolder
      // field: the field may have been replaced by a concurrent flush() since the message was
      // posted, and reading it here would also be an unsynchronized access.
      result = parser.parse(inputStream, null, holder.timeUs);
      error = null;
    } catch (IOException e) {
      result = null;
      error = e;
    }
    synchronized (this) {
      if (sampleHolder != holder) {
        // A flush has occurred since this holder was posted. Do nothing.
      } else {
        holder.data.position(0);
        this.result = result;
        this.error = error;
        this.parsing = false;
      }
    }
    return true;
  }
}

View File

@ -0,0 +1,295 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.text;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Join;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.text.StaticLayout;
import android.text.TextPaint;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.View;
/**
* A view for rendering captions.
* <p>
* The caption style and text size can be configured using {@link #setStyle(CaptionStyleCompat)} and
* {@link #setTextSize(float)} respectively.
*/
public class SubtitleView extends View {
/**
* Ratio of inner padding to font size.
*/
private static final float INNER_PADDING_RATIO = 0.125f;
/**
* Temporary rectangle used for computing line bounds.
*/
private final RectF lineBounds = new RectF();
/**
* Reusable string builder used for holding text.
*/
private final StringBuilder textBuilder = new StringBuilder();
// Styled dimensions.
private final float cornerRadius;
private final float outlineWidth;
private final float shadowRadius;
private final float shadowOffset;
private TextPaint textPaint;
private Paint paint;
private int foregroundColor;
private int backgroundColor;
private int edgeColor;
private int edgeType;
private boolean hasMeasurements;
private int lastMeasuredWidth;
private StaticLayout layout;
private float spacingMult;
private float spacingAdd;
private int innerPaddingX;
public SubtitleView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public SubtitleView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
int[] viewAttr = {android.R.attr.text, android.R.attr.textSize,
android.R.attr.lineSpacingExtra, android.R.attr.lineSpacingMultiplier};
TypedArray a = context.obtainStyledAttributes(attrs, viewAttr, defStyleAttr, 0);
CharSequence text = a.getText(0);
int textSize = a.getDimensionPixelSize(1, 15);
spacingAdd = a.getDimensionPixelSize(2, 0);
spacingMult = a.getFloat(3, 1);
a.recycle();
Resources resources = getContext().getResources();
DisplayMetrics displayMetrics = resources.getDisplayMetrics();
int twoDpInPx = Math.round((2 * displayMetrics.densityDpi) / DisplayMetrics.DENSITY_DEFAULT);
cornerRadius = twoDpInPx;
outlineWidth = twoDpInPx;
shadowRadius = twoDpInPx;
shadowOffset = twoDpInPx;
textPaint = new TextPaint();
textPaint.setAntiAlias(true);
textPaint.setSubpixelText(true);
paint = new Paint();
paint.setAntiAlias(true);
innerPaddingX = 0;
setText(text);
setTextSize(textSize);
setStyle(CaptionStyleCompat.DEFAULT);
}
public SubtitleView(Context context) {
this(context, null);
}
@Override
public void setBackgroundColor(int color) {
backgroundColor = color;
invalidate();
}
/**
* Sets the text to be displayed by the view.
*
* @param text The text to display.
*/
public void setText(CharSequence text) {
textBuilder.setLength(0);
textBuilder.append(text);
hasMeasurements = false;
requestLayout();
}
/**
* Sets the text size in pixels.
*
* @param size The text size in pixels.
*/
public void setTextSize(float size) {
if (textPaint.getTextSize() != size) {
textPaint.setTextSize(size);
innerPaddingX = (int) (size * INNER_PADDING_RATIO + 0.5f);
hasMeasurements = false;
requestLayout();
invalidate();
}
}
/**
* Configures the view according to the given style.
*
* @param style A style for the view.
*/
public void setStyle(CaptionStyleCompat style) {
foregroundColor = style.foregroundColor;
backgroundColor = style.backgroundColor;
edgeType = style.edgeType;
edgeColor = style.edgeColor;
setTypeface(style.typeface);
super.setBackgroundColor(style.windowColor);
hasMeasurements = false;
requestLayout();
}
private void setTypeface(Typeface typeface) {
if (textPaint.getTypeface() != typeface) {
textPaint.setTypeface(typeface);
hasMeasurements = false;
requestLayout();
invalidate();
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int widthSpec = MeasureSpec.getSize(widthMeasureSpec);
if (computeMeasurements(widthSpec)) {
final StaticLayout layout = this.layout;
final int paddingX = getPaddingLeft() + getPaddingRight() + innerPaddingX * 2;
final int height = layout.getHeight() + getPaddingTop() + getPaddingBottom();
int width = 0;
int lineCount = layout.getLineCount();
for (int i = 0; i < lineCount; i++) {
width = Math.max((int) Math.ceil(layout.getLineWidth(i)), width);
}
width += paddingX;
setMeasuredDimension(width, height);
} else if (Util.SDK_INT >= 11) {
setTooSmallMeasureDimensionV11();
} else {
setMeasuredDimension(0, 0);
}
}
@TargetApi(11)
private void setTooSmallMeasureDimensionV11() {
setMeasuredDimension(MEASURED_STATE_TOO_SMALL, MEASURED_STATE_TOO_SMALL);
}
@Override
public void onLayout(boolean changed, int l, int t, int r, int b) {
final int width = r - l;
computeMeasurements(width);
}
private boolean computeMeasurements(int maxWidth) {
if (hasMeasurements && maxWidth == lastMeasuredWidth) {
return true;
}
// Account for padding.
final int paddingX = getPaddingLeft() + getPaddingRight() + innerPaddingX * 2;
maxWidth -= paddingX;
if (maxWidth <= 0) {
return false;
}
hasMeasurements = true;
lastMeasuredWidth = maxWidth;
layout = new StaticLayout(textBuilder, textPaint, maxWidth, null, spacingMult, spacingAdd,
true);
return true;
}
@Override
protected void onDraw(Canvas c) {
final StaticLayout layout = this.layout;
if (layout == null) {
return;
}
final int saveCount = c.save();
final int innerPaddingX = this.innerPaddingX;
c.translate(getPaddingLeft() + innerPaddingX, getPaddingTop());
final int lineCount = layout.getLineCount();
final Paint textPaint = this.textPaint;
final Paint paint = this.paint;
final RectF bounds = lineBounds;
if (Color.alpha(backgroundColor) > 0) {
final float cornerRadius = this.cornerRadius;
float previousBottom = layout.getLineTop(0);
paint.setColor(backgroundColor);
paint.setStyle(Style.FILL);
for (int i = 0; i < lineCount; i++) {
bounds.left = layout.getLineLeft(i) - innerPaddingX;
bounds.right = layout.getLineRight(i) + innerPaddingX;
bounds.top = previousBottom;
bounds.bottom = layout.getLineBottom(i);
previousBottom = bounds.bottom;
c.drawRoundRect(bounds, cornerRadius, cornerRadius, paint);
}
}
if (edgeType == CaptionStyleCompat.EDGE_TYPE_OUTLINE) {
textPaint.setStrokeJoin(Join.ROUND);
textPaint.setStrokeWidth(outlineWidth);
textPaint.setColor(edgeColor);
textPaint.setStyle(Style.FILL_AND_STROKE);
layout.draw(c);
} else if (edgeType == CaptionStyleCompat.EDGE_TYPE_DROP_SHADOW) {
textPaint.setShadowLayer(shadowRadius, shadowOffset, shadowOffset, edgeColor);
} else if (edgeType == CaptionStyleCompat.EDGE_TYPE_RAISED
|| edgeType == CaptionStyleCompat.EDGE_TYPE_DEPRESSED) {
boolean raised = edgeType == CaptionStyleCompat.EDGE_TYPE_RAISED;
int colorUp = raised ? Color.WHITE : edgeColor;
int colorDown = raised ? edgeColor : Color.WHITE;
float offset = shadowRadius / 2f;
textPaint.setColor(foregroundColor);
textPaint.setStyle(Style.FILL);
textPaint.setShadowLayer(shadowRadius, -offset, -offset, colorUp);
layout.draw(c);
textPaint.setShadowLayer(shadowRadius, offset, offset, colorDown);
}
textPaint.setColor(foregroundColor);
textPaint.setStyle(Style.FILL);
layout.draw(c);
textPaint.setShadowLayer(0, 0, 0, 0);
c.restoreToCount(saveCount);
}
}

View File

@ -20,20 +20,16 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.VerboseLogUtil;
import android.annotation.TargetApi;
import android.os.Handler;
import android.os.Handler.Callback;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* A {@link TrackRenderer} for textual subtitles. The actual rendering of each line of text to a
@ -56,14 +52,11 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
}
private static final String TAG = "TextTrackRenderer";
private static final int MSG_UPDATE_OVERLAY = 0;
private final Handler textRendererHandler;
private final TextRenderer textRenderer;
private final SampleSource source;
private final SampleHolder sampleHolder;
private final MediaFormatHolder formatHolder;
private final SubtitleParser subtitleParser;
@ -73,6 +66,8 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
private boolean inputStreamEnded;
private Subtitle subtitle;
private SubtitleParserHelper parserHelper;
private HandlerThread parserThread;
private int nextSubtitleEventIndex;
private boolean textRendererNeedsUpdate;
@ -94,7 +89,6 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
this.textRendererHandler = textRendererLooper == null ? null : new Handler(textRendererLooper,
this);
formatHolder = new MediaFormatHolder();
sampleHolder = new SampleHolder(true);
}
@Override
@ -117,46 +111,66 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
}
@Override
protected void onEnabled(long timeUs, boolean joining) {
source.enable(trackIndex, timeUs);
seekToInternal(timeUs);
protected void onEnabled(long positionUs, boolean joining) {
source.enable(trackIndex, positionUs);
parserThread = new HandlerThread("textParser");
parserThread.start();
parserHelper = new SubtitleParserHelper(parserThread.getLooper(), subtitleParser);
seekToInternal(positionUs);
}
@Override
protected void seekTo(long timeUs) {
source.seekToUs(timeUs);
seekToInternal(timeUs);
protected void seekTo(long positionUs) {
source.seekToUs(positionUs);
seekToInternal(positionUs);
}
private void seekToInternal(long timeUs) {
private void seekToInternal(long positionUs) {
inputStreamEnded = false;
currentPositionUs = timeUs;
source.seekToUs(timeUs);
if (subtitle != null && (timeUs < subtitle.getStartTime()
|| subtitle.getLastEventTime() <= timeUs)) {
currentPositionUs = positionUs;
source.seekToUs(positionUs);
if (subtitle != null && (positionUs < subtitle.getStartTime()
|| subtitle.getLastEventTime() <= positionUs)) {
subtitle = null;
}
resetSampleData();
parserHelper.flush();
clearTextRenderer();
syncNextEventIndex(timeUs);
syncNextEventIndex(positionUs);
textRendererNeedsUpdate = subtitle != null;
}
@Override
protected void doSomeWork(long timeUs) throws ExoPlaybackException {
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
currentPositionUs = positionUs;
try {
source.continueBuffering(timeUs);
source.continueBuffering(positionUs);
} catch (IOException e) {
throw new ExoPlaybackException(e);
}
currentPositionUs = timeUs;
if (parserHelper.isParsing()) {
return;
}
// We're iterating through the events in a subtitle. Set textRendererNeedsUpdate if we advance
// to the next event.
if (subtitle != null) {
Subtitle dequeuedSubtitle = null;
if (subtitle == null) {
try {
dequeuedSubtitle = parserHelper.getAndClearResult();
} catch (IOException e) {
throw new ExoPlaybackException(e);
}
}
if (subtitle == null && dequeuedSubtitle != null) {
// We've dequeued a new subtitle. Sync the event index and update the subtitle.
subtitle = dequeuedSubtitle;
syncNextEventIndex(positionUs);
textRendererNeedsUpdate = true;
} else if (subtitle != null) {
// We're iterating through the events in a subtitle. Set textRendererNeedsUpdate if we
// advance to the next event.
long nextEventTimeUs = getNextEventTime();
while (nextEventTimeUs <= timeUs) {
while (nextEventTimeUs <= positionUs) {
nextSubtitleEventIndex++;
nextEventTimeUs = getNextEventTime();
textRendererNeedsUpdate = true;
@ -169,27 +183,17 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
// We don't have a subtitle. Try and read the next one from the source, and if we succeed then
// sync and set textRendererNeedsUpdate.
if (subtitle == null) {
boolean resetSampleHolder = false;
if (!inputStreamEnded && subtitle == null) {
try {
int result = source.readData(trackIndex, timeUs, formatHolder, sampleHolder, false);
SampleHolder sampleHolder = parserHelper.getSampleHolder();
int result = source.readData(trackIndex, positionUs, formatHolder, sampleHolder, false);
if (result == SampleSource.SAMPLE_READ) {
resetSampleHolder = true;
InputStream subtitleInputStream =
new ByteArrayInputStream(sampleHolder.data.array(), 0, sampleHolder.size);
subtitle = subtitleParser.parse(subtitleInputStream, "UTF-8", sampleHolder.timeUs);
syncNextEventIndex(timeUs);
textRendererNeedsUpdate = true;
parserHelper.startParseOperation();
} else if (result == SampleSource.END_OF_STREAM) {
inputStreamEnded = true;
}
} catch (IOException e) {
resetSampleHolder = true;
throw new ExoPlaybackException(e);
} finally {
if (resetSampleHolder) {
resetSampleData();
}
}
}
@ -199,17 +203,19 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
if (subtitle == null) {
clearTextRenderer();
} else {
updateTextRenderer(timeUs);
updateTextRenderer(positionUs);
}
}
}
@Override
protected void onDisabled() {
source.disable(trackIndex);
subtitle = null;
resetSampleData();
parserThread.quit();
parserThread = null;
parserHelper = null;
clearTextRenderer();
source.disable(trackIndex);
}
@Override
@ -241,12 +247,12 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
@Override
protected boolean isReady() {
// Don't block playback whilst subtitles are loading.
// Note: To change this behavior, it will be necessary to consider [redacted].
// Note: To change this behavior, it will be necessary to consider [Internal: b/12949941].
return true;
}
private void syncNextEventIndex(long timeUs) {
nextSubtitleEventIndex = subtitle == null ? -1 : subtitle.getNextEventTimeIndex(timeUs);
private void syncNextEventIndex(long positionUs) {
nextSubtitleEventIndex = subtitle == null ? -1 : subtitle.getNextEventTimeIndex(positionUs);
}
private long getNextEventTime() {
@ -255,28 +261,20 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
: (subtitle.getEventTime(nextSubtitleEventIndex));
}
private void resetSampleData() {
if (sampleHolder.data != null) {
sampleHolder.data.position(0);
}
}
private void updateTextRenderer(long timeUs) {
String text = subtitle.getText(timeUs);
log("updateTextRenderer; text=: " + text);
private void updateTextRenderer(long positionUs) {
String text = subtitle.getText(positionUs);
if (textRendererHandler != null) {
textRendererHandler.obtainMessage(MSG_UPDATE_OVERLAY, text).sendToTarget();
} else {
invokeTextRenderer(text);
invokeRendererInternal(text);
}
}
private void clearTextRenderer() {
log("clearTextRenderer");
if (textRendererHandler != null) {
textRendererHandler.obtainMessage(MSG_UPDATE_OVERLAY, null).sendToTarget();
} else {
invokeTextRenderer(null);
invokeRendererInternal(null);
}
}
@ -284,20 +282,14 @@ public class TextTrackRenderer extends TrackRenderer implements Callback {
public boolean handleMessage(Message msg) {
switch (msg.what) {
case MSG_UPDATE_OVERLAY:
invokeTextRenderer((String) msg.obj);
invokeRendererInternal((String) msg.obj);
return true;
}
return false;
}
private void invokeTextRenderer(String text) {
private void invokeRendererInternal(String text) {
textRenderer.onText(text);
}
private void log(String logMessage) {
if (VerboseLogUtil.isTagEnabled(TAG)) {
Log.v(TAG, "type=" + AdaptationSet.TYPE_TEXT + ", " + logMessage);
}
}
}

View File

@ -15,6 +15,8 @@
*/
package com.google.android.exoplayer.text.ttml;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.text.Subtitle;
import com.google.android.exoplayer.text.SubtitleParser;
import com.google.android.exoplayer.util.MimeTypes;
@ -72,8 +74,23 @@ public class TtmlParser implements SubtitleParser {
private static final int DEFAULT_TICKRATE = 1;
private final XmlPullParserFactory xmlParserFactory;
private final boolean strictParsing;
/**
* Equivalent to {@code TtmlParser(true)}.
*/
public TtmlParser() {
this(true);
}
/**
* @param strictParsing If true, {@link #parse(InputStream, String, long)} will throw a
* {@link ParserException} if the stream contains invalid ttml. If false, the parser will
* make a best effort to ignore minor errors in the stream. Note however that a
* {@link ParserException} will still be thrown when this is not possible.
*/
public TtmlParser(boolean strictParsing) {
this.strictParsing = strictParsing;
try {
xmlParserFactory = XmlPullParserFactory.newInstance();
} catch (XmlPullParserException e) {
@ -89,21 +106,31 @@ public class TtmlParser implements SubtitleParser {
xmlParser.setInput(inputStream, inputEncoding);
TtmlSubtitle ttmlSubtitle = null;
LinkedList<TtmlNode> nodeStack = new LinkedList<TtmlNode>();
int unsupportedTagDepth = 0;
int unsupportedNodeDepth = 0;
int eventType = xmlParser.getEventType();
while (eventType != XmlPullParser.END_DOCUMENT) {
TtmlNode parent = nodeStack.peekLast();
if (unsupportedTagDepth == 0) {
if (unsupportedNodeDepth == 0) {
String name = xmlParser.getName();
if (eventType == XmlPullParser.START_TAG) {
if (!isSupportedTag(name)) {
Log.w(TAG, "Ignoring unsupported tag: " + xmlParser.getName());
unsupportedTagDepth++;
Log.i(TAG, "Ignoring unsupported tag: " + xmlParser.getName());
unsupportedNodeDepth++;
} else {
TtmlNode node = parseNode(xmlParser, parent);
nodeStack.addLast(node);
if (parent != null) {
parent.addChild(node);
try {
TtmlNode node = parseNode(xmlParser, parent);
nodeStack.addLast(node);
if (parent != null) {
parent.addChild(node);
}
} catch (ParserException e) {
if (strictParsing) {
throw e;
} else {
Log.e(TAG, "Suppressing parser error", e);
// Treat the node (and by extension, all of its children) as unsupported.
unsupportedNodeDepth++;
}
}
}
} else if (eventType == XmlPullParser.TEXT) {
@ -116,9 +143,9 @@ public class TtmlParser implements SubtitleParser {
}
} else {
if (eventType == XmlPullParser.START_TAG) {
unsupportedTagDepth++;
unsupportedNodeDepth++;
} else if (eventType == XmlPullParser.END_TAG) {
unsupportedTagDepth--;
unsupportedNodeDepth--;
}
}
xmlParser.next();
@ -126,7 +153,7 @@ public class TtmlParser implements SubtitleParser {
}
return ttmlSubtitle;
} catch (XmlPullParserException xppe) {
throw new IOException("Unable to parse source", xppe);
throw new ParserException("Unable to parse source", xppe);
}
}
@ -135,7 +162,7 @@ public class TtmlParser implements SubtitleParser {
return MimeTypes.APPLICATION_TTML.equals(mimeType);
}
private TtmlNode parseNode(XmlPullParser parser, TtmlNode parent) {
private TtmlNode parseNode(XmlPullParser parser, TtmlNode parent) throws ParserException {
long duration = 0;
long startTime = TtmlNode.UNDEFINED_TIME;
long endTime = TtmlNode.UNDEFINED_TIME;
@ -209,10 +236,10 @@ public class TtmlParser implements SubtitleParser {
* @param subframeRate The sub-framerate of the stream
* @param tickRate The tick rate of the stream.
* @return The parsed timestamp in microseconds.
* @throws NumberFormatException If the given string does not contain a valid time expression.
* @throws ParserException If the given string does not contain a valid time expression.
*/
private static long parseTimeExpression(String time, int frameRate, int subframeRate,
int tickRate) {
int tickRate) throws ParserException {
Matcher matcher = CLOCK_TIME.matcher(time);
if (matcher.matches()) {
String hours = matcher.group(1);
@ -228,29 +255,29 @@ public class TtmlParser implements SubtitleParser {
String subframes = matcher.group(6);
durationSeconds += (subframes != null) ?
((double) Long.parseLong(subframes)) / subframeRate / frameRate : 0;
return (long) (durationSeconds * 1000000);
return (long) (durationSeconds * C.MICROS_PER_SECOND);
}
matcher = OFFSET_TIME.matcher(time);
if (matcher.matches()) {
String timeValue = matcher.group(1);
double value = Double.parseDouble(timeValue);
double offsetSeconds = Double.parseDouble(timeValue);
String unit = matcher.group(2);
if (unit.equals("h")) {
value *= 3600L * 1000000L;
offsetSeconds *= 3600;
} else if (unit.equals("m")) {
value *= 60 * 1000000;
offsetSeconds *= 60;
} else if (unit.equals("s")) {
value *= 1000000;
// Do nothing.
} else if (unit.equals("ms")) {
value *= 1000;
offsetSeconds /= 1000;
} else if (unit.equals("f")) {
value = value / frameRate * 1000000;
offsetSeconds /= frameRate;
} else if (unit.equals("t")) {
value = value / tickRate * 1000000;
offsetSeconds /= tickRate;
}
return (long) value;
return (long) (offsetSeconds * C.MICROS_PER_SECOND);
}
throw new NumberFormatException("Malformed time expression: " + time);
throw new ParserException("Malformed time expression: " + time);
}
}

View File

@ -0,0 +1,226 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.text.webvtt;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.text.SubtitleParser;
import com.google.android.exoplayer.util.MimeTypes;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* A simple WebVTT parser.
* <p>
* @see <a href="http://dev.w3.org/html5/webvtt">WebVTT specification</a>
* <p>
*/
public class WebvttParser implements SubtitleParser {

  /**
   * This parser allows a custom header to be prepended to the WebVTT data, in the form of a text
   * line starting with this string.
   *
   * @hide
   */
  public static final String EXO_HEADER = "EXO-HEADER";

  /**
   * An {@code OFFSET + value} element can be added to the custom header to specify an offset time
   * (in microseconds) that should be subtracted from the embedded MPEGTS value.
   *
   * @hide
   */
  public static final String OFFSET = "OFFSET:";

  // MPEGTS timestamps are expressed in a 90 kHz clock, i.e. 90 ticks per millisecond.
  private static final long SAMPLING_RATE = 90;

  private static final String WEBVTT_METADATA_HEADER_STRING = "\\S*[:=]\\S*";
  private static final Pattern WEBVTT_METADATA_HEADER =
      Pattern.compile(WEBVTT_METADATA_HEADER_STRING);

  private static final String WEBVTT_TIMESTAMP_STRING = "(\\d+:)?[0-5]\\d:[0-5]\\d\\.\\d{3}";
  private static final Pattern WEBVTT_TIMESTAMP = Pattern.compile(WEBVTT_TIMESTAMP_STRING);

  private static final Pattern MEDIA_TIMESTAMP_OFFSET = Pattern.compile(OFFSET + "\\d+");
  private static final Pattern MEDIA_TIMESTAMP = Pattern.compile("MPEGTS:\\d+");

  private final boolean strictParsing;

  /**
   * Equivalent to {@code WebvttParser(true)}.
   */
  public WebvttParser() {
    this(true);
  }

  /**
   * @param strictParsing If true, noncompliant header lines cause a {@link ParserException} to be
   *     thrown. If false, such lines are ignored.
   */
  public WebvttParser(boolean strictParsing) {
    this.strictParsing = strictParsing;
  }

  @Override
  public WebvttSubtitle parse(InputStream inputStream, String inputEncoding, long startTimeUs)
      throws IOException {
    ArrayList<WebvttCue> subtitles = new ArrayList<WebvttCue>();
    long mediaTimestampUs = startTimeUs;
    long mediaTimestampOffsetUs = 0;

    BufferedReader webvttData = new BufferedReader(new InputStreamReader(inputStream, C.UTF8_NAME));
    try {
      String line;

      // The file should start with "WEBVTT" on the first line, or with "EXO-HEADER".
      line = webvttData.readLine();
      if (line == null) {
        throw new ParserException("Expected WEBVTT or EXO-HEADER. Got null");
      }

      if (line.startsWith(EXO_HEADER)) {
        // Parse the timestamp offset, if present.
        Matcher matcher = MEDIA_TIMESTAMP_OFFSET.matcher(line);
        if (matcher.find()) {
          mediaTimestampOffsetUs = Long.parseLong(matcher.group().substring(OFFSET.length()));
        }

        // Read the next line, which should now be WEBVTT.
        line = webvttData.readLine();
        if (line == null) {
          throw new ParserException("Expected WEBVTT. Got null");
        }
      }

      if (!line.equals("WEBVTT")) {
        throw new ParserException("Expected WEBVTT. Got " + line);
      }

      // Parse the remainder of the header.
      while (true) {
        line = webvttData.readLine();
        if (line == null) {
          // We reached EOF before finishing the header.
          throw new ParserException("Expected an empty line after webvtt header");
        } else if (line.isEmpty()) {
          // We've read the newline that separates the header from the body.
          break;
        }

        Matcher matcher = WEBVTT_METADATA_HEADER.matcher(line);
        if (!matcher.find()) {
          handleNoncompliantLine(line);
        }

        if (line.startsWith("X-TIMESTAMP-MAP")) {
          // Parse the media timestamp.
          Matcher timestampMatcher = MEDIA_TIMESTAMP.matcher(line);
          if (!timestampMatcher.find()) {
            throw new ParserException("X-TIMESTAMP-MAP doesn't contain media timestamp: " + line);
          } else {
            // Convert the 90 kHz MPEGTS value to microseconds and apply the custom offset.
            mediaTimestampUs = (Long.parseLong(timestampMatcher.group().substring(7)) * 1000)
                / SAMPLING_RATE - mediaTimestampOffsetUs;
          }
          mediaTimestampUs = getAdjustedStartTime(mediaTimestampUs);
        }
      }

      // Process the cues and text.
      while ((line = webvttData.readLine()) != null) {
        // Parse the cue timestamps.
        Matcher matcher = WEBVTT_TIMESTAMP.matcher(line);
        long startTime;
        long endTime;

        // Parse start timestamp.
        if (!matcher.find()) {
          throw new ParserException("Expected cue start time: " + line);
        } else {
          startTime = parseTimestampUs(matcher.group()) + mediaTimestampUs;
        }

        // Parse end timestamp.
        if (!matcher.find()) {
          throw new ParserException("Expected cue end time: " + line);
        } else {
          endTime = parseTimestampUs(matcher.group()) + mediaTimestampUs;
        }

        // Parse the cue text. Use a StringBuilder rather than repeated string concatenation,
        // which is quadratic in the number of lines.
        StringBuilder textBuilder = new StringBuilder();
        while (((line = webvttData.readLine()) != null) && (!line.isEmpty())) {
          textBuilder.append(line.trim()).append("\n");
        }

        subtitles.add(new WebvttCue(startTime, endTime, textBuilder.toString()));
      }
    } finally {
      // Close the streams on all paths, including parse failures, so that the underlying
      // resources are not leaked.
      webvttData.close();
      inputStream.close();
    }

    // Copy WebvttCue data into arrays for the WebvttSubtitle constructor.
    String[] cueText = new String[subtitles.size()];
    long[] cueTimesUs = new long[2 * subtitles.size()];
    for (int subtitleIndex = 0; subtitleIndex < subtitles.size(); subtitleIndex++) {
      int arrayIndex = subtitleIndex * 2;
      WebvttCue cue = subtitles.get(subtitleIndex);
      cueTimesUs[arrayIndex] = cue.startTime;
      cueTimesUs[arrayIndex + 1] = cue.endTime;
      cueText[subtitleIndex] = cue.text;
    }

    return new WebvttSubtitle(cueText, mediaTimestampUs, cueTimesUs);
  }

  @Override
  public boolean canParse(String mimeType) {
    return MimeTypes.TEXT_VTT.equals(mimeType);
  }

  /**
   * Returns an adjusted start time. The default implementation is a no-op. Subclasses may
   * override this method to apply an adjustment to the parsed media timestamp.
   *
   * @param startTimeUs The parsed start time, in microseconds.
   * @return The adjusted start time, in microseconds.
   */
  protected long getAdjustedStartTime(long startTimeUs) {
    return startTimeUs;
  }

  /**
   * Invoked when a header line does not match the expected metadata format.
   *
   * @param line The noncompliant line.
   * @throws ParserException If strict parsing is enabled.
   */
  protected void handleNoncompliantLine(String line) throws ParserException {
    if (strictParsing) {
      throw new ParserException("Unexpected line: " + line);
    }
  }

  /**
   * Parses a WebVTT timestamp (e.g. {@code 01:02:03.456}) into microseconds.
   *
   * @param s The timestamp string.
   * @return The parsed time in microseconds.
   * @throws NumberFormatException If the string is not a valid WebVTT timestamp.
   */
  private static long parseTimestampUs(String s) throws NumberFormatException {
    if (!s.matches(WEBVTT_TIMESTAMP_STRING)) {
      throw new NumberFormatException("has invalid format");
    }
    String[] parts = s.split("\\.", 2);
    long value = 0;
    for (String group : parts[0].split(":")) {
      value = value * 60 + Long.parseLong(group);
    }
    // Whole seconds to milliseconds, add the fractional millisecond part, then to microseconds.
    return (value * 1000 + Long.parseLong(parts[1])) * 1000;
  }

  // Immutable holder for a single parsed cue.
  private static class WebvttCue {

    public final long startTime;
    public final long endTime;
    public final String text;

    public WebvttCue(long startTime, long endTime, String text) {
      this.startTime = startTime;
      this.endTime = endTime;
      this.text = text;
    }

  }

}

View File

@ -0,0 +1,99 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.text.webvtt;
import com.google.android.exoplayer.text.Subtitle;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
import java.util.Arrays;
/**
* A representation of a WebVTT subtitle.
*/
public class WebvttSubtitle implements Subtitle {

  private final String[] cueText;
  private final long startTimeUs;
  private final long[] cueTimesUs;
  private final long[] sortedCueTimesUs;

  /**
   * @param cueText Text to be displayed during each cue.
   * @param startTimeUs The start time of the subtitle.
   * @param cueTimesUs Cue event times, where cueTimesUs[2 * i] and cueTimesUs[(2 * i) + 1] are
   *     the start and end times, respectively, corresponding to cueText[i].
   */
  public WebvttSubtitle(String[] cueText, long startTimeUs, long[] cueTimesUs) {
    this.cueText = cueText;
    this.startTimeUs = startTimeUs;
    this.cueTimesUs = cueTimesUs;
    // Keep a sorted copy of the event times so that event queries can use binary search.
    sortedCueTimesUs = Arrays.copyOf(cueTimesUs, cueTimesUs.length);
    Arrays.sort(sortedCueTimesUs);
  }

  @Override
  public long getStartTime() {
    return startTimeUs;
  }

  @Override
  public int getNextEventTimeIndex(long timeUs) {
    Assertions.checkArgument(timeUs >= 0);
    int eventIndex = Util.binarySearchCeil(sortedCueTimesUs, timeUs, false, false);
    // An index beyond the last event means there is no next event.
    if (eventIndex < sortedCueTimesUs.length) {
      return eventIndex;
    }
    return -1;
  }

  @Override
  public int getEventTimeCount() {
    return sortedCueTimesUs.length;
  }

  @Override
  public long getEventTime(int index) {
    Assertions.checkArgument(index >= 0);
    Assertions.checkArgument(index < sortedCueTimesUs.length);
    return sortedCueTimesUs[index];
  }

  @Override
  public long getLastEventTime() {
    int eventCount = getEventTimeCount();
    return eventCount == 0 ? -1 : sortedCueTimesUs[eventCount - 1];
  }

  @Override
  public String getText(long timeUs) {
    StringBuilder displayedText = new StringBuilder();
    int cueCount = cueTimesUs.length / 2;
    for (int cueIndex = 0; cueIndex < cueCount; cueIndex++) {
      long cueStartUs = cueTimesUs[2 * cueIndex];
      long cueEndUs = cueTimesUs[(2 * cueIndex) + 1];
      // Include every cue whose interval [start, end) contains timeUs.
      if (cueStartUs <= timeUs && timeUs < cueEndUs) {
        displayedText.append(cueText[cueIndex]);
      }
    }
    int length = displayedText.length();
    if (length > 0 && displayedText.charAt(length - 1) == '\n') {
      // Drop the single trailing newline character, if present.
      length--;
    }
    return length == 0 ? null : displayedText.substring(0, length);
  }

}

View File

@ -119,26 +119,6 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
return resolvedLength != C.LENGTH_UNBOUNDED && loadPosition == resolvedLength;
}
/**
* Returns a byte array containing the loaded data. If the data is partially loaded, this method
* returns the portion of the data that has been loaded so far. If nothing has been loaded, null
* is returned. This method does not use or update the current read position.
* <p>
* Note: The read methods provide a more efficient way of consuming the loaded data. Use this
* method only when a freshly allocated byte[] containing all of the loaded data is required.
*
* @return The loaded data, or null.
*/
public final byte[] getLoadedData() {
if (loadPosition == 0) {
return null;
}
byte[] rawData = new byte[(int) loadPosition];
read(null, rawData, 0, new ReadHead(), rawData.length);
return rawData;
}
// {@link NonBlockingInputStream} implementation.
@Override

View File

@ -53,6 +53,15 @@ public final class DataSpec {
*/
public final String key;
/**
* Construct a {@link DataSpec} for the given uri and with {@link #key} set to null.
*
* @param uri {@link #uri}.
*/
public DataSpec(Uri uri) {
this(uri, 0, C.LENGTH_UNBOUNDED, null);
}
/**
* Construct a {@link DataSpec} for which {@link #uriIsFullStream} is true.
*

View File

@ -36,8 +36,27 @@ public final class FileDataSource implements DataSource {
}
private final TransferListener listener;
private RandomAccessFile file;
private long bytesRemaining;
private boolean opened;
/**
* Constructs a new {@link DataSource} that retrieves data from a file.
*/
public FileDataSource() {
this(null);
}
/**
* Constructs a new {@link DataSource} that retrieves data from a file.
*
* @param listener An optional listener. Specify {@code null} for no listener.
*/
public FileDataSource(TransferListener listener) {
this.listener = listener;
}
@Override
public long open(DataSpec dataSpec) throws FileDataSourceException {
@ -46,10 +65,16 @@ public final class FileDataSource implements DataSource {
file.seek(dataSpec.position);
bytesRemaining = dataSpec.length == C.LENGTH_UNBOUNDED ? file.length() - dataSpec.position
: dataSpec.length;
return bytesRemaining;
} catch (IOException e) {
throw new FileDataSourceException(e);
}
opened = true;
if (listener != null) {
listener.onTransferStart();
}
return bytesRemaining;
}
@Override
@ -63,7 +88,14 @@ public final class FileDataSource implements DataSource {
} catch (IOException e) {
throw new FileDataSourceException(e);
}
bytesRemaining -= bytesRead;
if (bytesRead > 0) {
bytesRemaining -= bytesRead;
if (listener != null) {
listener.onBytesTransferred(bytesRead);
}
}
return bytesRead;
}
}
@ -75,8 +107,16 @@ public final class FileDataSource implements DataSource {
file.close();
} catch (IOException e) {
throw new FileDataSourceException(e);
} finally {
file = null;
if (opened) {
opened = false;
if (listener != null) {
listener.onTransferEnd();
}
}
}
file = null;
}
}

View File

@ -376,7 +376,7 @@ public class HttpDataSource implements DataSource {
connection.setReadTimeout(readTimeoutMillis);
connection.setDoOutput(false);
synchronized (requestProperties) {
for (HashMap.Entry<String, String> property : requestProperties.entrySet()) {
for (Map.Entry<String, String> property : requestProperties.entrySet()) {
connection.setRequestProperty(property.getKey(), property.getValue());
}
}

View File

@ -20,6 +20,7 @@ import com.google.android.exoplayer.util.Util;
import android.annotation.SuppressLint;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
@ -72,22 +73,28 @@ public final class Loader {
/**
* Interface definition for a callback to be notified of {@link Loader} events.
*/
public interface Listener {
public interface Callback {
/**
* Invoked when loading has been canceled.
*
* @param loadable The loadable whose load has been canceled.
*/
void onCanceled();
void onLoadCanceled(Loadable loadable);
/**
* Invoked when the data source has been fully loaded.
*
* @param loadable The loadable whose load has completed.
*/
void onLoaded();
void onLoadCompleted(Loadable loadable);
/**
* Invoked when the data source is stopped due to an error.
*
* @param loadable The loadable whose load has failed.
*/
void onError(IOException exception);
void onLoadError(Loadable loadable, IOException exception);
}
@ -95,18 +102,29 @@ public final class Loader {
private static final int MSG_ERROR = 1;
private final ExecutorService downloadExecutorService;
private final Listener listener;
private LoadTask currentTask;
private boolean loading;
/**
* @param threadName A name for the loader's thread.
* @param listener A listener to invoke when state changes occur.
*/
public Loader(String threadName, Listener listener) {
public Loader(String threadName) {
this.downloadExecutorService = Util.newSingleThreadExecutor(threadName);
this.listener = listener;
}
/**
* Invokes {@link #startLoading(Looper, Loadable, Callback)}, using the {@link Looper}
* associated with the calling thread.
*
* @param loadable The {@link Loadable} to load.
* @param callback A callback to invoke when the load ends.
* @throws IllegalStateException If the calling thread does not have an associated {@link Looper}.
*/
public void startLoading(Loadable loadable, Callback callback) {
Looper myLooper = Looper.myLooper();
Assertions.checkState(myLooper != null);
startLoading(myLooper, loadable, callback);
}
/**
@ -115,12 +133,14 @@ public final class Loader {
* A {@link Loader} instance can only load one {@link Loadable} at a time, and so this method
* must not be called when another load is in progress.
*
* @param looper The looper of the thread on which the callback should be invoked.
* @param loadable The {@link Loadable} to load.
* @param callback A callback to invoke when the load ends.
*/
public void startLoading(Loadable loadable) {
public void startLoading(Looper looper, Loadable loadable, Callback callback) {
Assertions.checkState(!loading);
loading = true;
currentTask = new LoadTask(loadable);
currentTask = new LoadTask(looper, loadable, callback);
downloadExecutorService.submit(currentTask);
}
@ -161,11 +181,14 @@ public final class Loader {
private static final String TAG = "LoadTask";
private final Loadable loadable;
private final Loader.Callback callback;
private volatile Thread executorThread;
public LoadTask(Loadable loadable) {
public LoadTask(Looper looper, Loadable loadable, Loader.Callback callback) {
super(looper);
this.loadable = loadable;
this.callback = callback;
}
public void quit() {
@ -200,15 +223,15 @@ public final class Loader {
public void handleMessage(Message msg) {
onFinished();
if (loadable.isLoadCanceled()) {
listener.onCanceled();
callback.onLoadCanceled(loadable);
return;
}
switch (msg.what) {
case MSG_END_OF_SOURCE:
listener.onLoaded();
callback.onLoadCompleted(loadable);
break;
case MSG_ERROR:
listener.onError((IOException) msg.obj);
callback.onLoadError(loadable, (IOException) msg.obj);
break;
}
}

View File

@ -0,0 +1,82 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.upstream;
import com.google.android.exoplayer.util.Assertions;
import java.io.IOException;
/**
* A data source that fetches data from a local or remote {@link DataSpec}.
*/
public final class UriDataSource implements DataSource {

  private static final String FILE_URI_SCHEME = "file";

  private final DataSource fileDataSource;
  private final DataSource httpDataSource;

  /**
   * {@code null} if no data source is open. Otherwise, equal to {@link #fileDataSource} if the
   * open data source is a file, or {@link #httpDataSource} otherwise.
   */
  private DataSource dataSource;

  /**
   * Constructs a new data source that delegates to a {@link FileDataSource} for file URIs and an
   * {@link HttpDataSource} for other URIs.
   *
   * @param userAgent The User-Agent string that should be used when requesting remote data.
   * @param transferListener An optional listener.
   */
  public UriDataSource(String userAgent, TransferListener transferListener) {
    this(new FileDataSource(transferListener),
        new HttpDataSource(userAgent, null, transferListener));
  }

  /**
   * Constructs a new data source using {@code fileDataSource} for file URIs, and
   * {@code httpDataSource} for non-file URIs.
   *
   * @param fileDataSource {@link DataSource} to use for file URIs.
   * @param httpDataSource {@link DataSource} to use for non-file URIs.
   */
  public UriDataSource(DataSource fileDataSource, DataSource httpDataSource) {
    this.fileDataSource = Assertions.checkNotNull(fileDataSource);
    this.httpDataSource = Assertions.checkNotNull(httpDataSource);
  }

  @Override
  public long open(DataSpec dataSpec) throws IOException {
    // Only one DataSpec may be open at a time.
    Assertions.checkState(dataSource == null);
    // Route file:// URIs to the file source and everything else to the HTTP source.
    boolean isFileUri = FILE_URI_SCHEME.equals(dataSpec.uri.getScheme());
    dataSource = isFileUri ? fileDataSource : httpDataSource;
    return dataSource.open(dataSpec);
  }

  @Override
  public int read(byte[] buffer, int offset, int readLength) throws IOException {
    // Delegate to whichever underlying source is currently open.
    return dataSource.read(buffer, offset, readLength);
  }

  @Override
  public void close() throws IOException {
    DataSource openSource = dataSource;
    if (openSource != null) {
      openSource.close();
      dataSource = null;
    }
  }

}

View File

@@ -15,127 +15,373 @@
*/
package com.google.android.exoplayer.util;
import com.google.android.exoplayer.ParserException;
import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.util.Pair;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.concurrent.CancellationException;
/**
* An {@link AsyncTask} for loading and parsing media manifests.
* Performs both single and repeated loads of media manifests.
*
* @param <T> The type of the manifest being parsed.
* @param <T> The type of manifest.
*/
public abstract class ManifestFetcher<T> extends AsyncTask<String, Void, T> {
public class ManifestFetcher<T> implements Loader.Callback {
/**
* Invoked with the result of a manifest fetch.
* Interface definition for a callback to be notified of {@link ManifestFetcher} events.
*/
public interface EventListener {

  /** Invoked when a manifest refresh has been started. */
  public void onManifestRefreshStarted();

  /** Invoked when a manifest refresh has successfully completed. */
  public void onManifestRefreshed();

  /**
   * Invoked when a manifest refresh fails.
   *
   * @param e The cause of the failure.
   */
  public void onManifestError(IOException e);
}
/**
* Callback for the result of a single load.
*
* @param <T> The type of the manifest being parsed.
* @param <T> The type of manifest.
*/
public interface ManifestCallback<T> {
/**
* Invoked from {@link #onPostExecute(Object)} with the parsed manifest.
* Invoked when the load has successfully completed.
*
* @param contentId The content id of the media.
* @param manifest The parsed manifest.
* @param manifest The loaded manifest.
*/
void onManifest(String contentId, T manifest);
/**
* Invoked from {@link #onPostExecute(Object)} if an error occurred.
* Invoked when the load has failed.
*
* @param contentId The content id of the media.
* @param e The error.
* @param e The cause of the failure.
*/
void onManifestError(String contentId, Exception e);
void onManifestError(String contentId, IOException e);
}
public static final int DEFAULT_HTTP_TIMEOUT_MILLIS = 8000;
/* package */ final ManifestParser<T> parser;
/* package */ final String contentId;
/* package */ final String userAgent;
private final Handler eventHandler;
private final EventListener eventListener;
private final ManifestCallback<T> callback;
private final int timeoutMillis;
/* package */ volatile String manifestUrl;
private volatile String contentId;
private volatile Exception exception;
private int enabledCount;
private Loader loader;
private ManifestLoadable currentLoadable;
/**
* @param callback The callback to provide with the parsed manifest (or error).
*/
public ManifestFetcher(ManifestCallback<T> callback) {
this(callback, DEFAULT_HTTP_TIMEOUT_MILLIS);
private int loadExceptionCount;
private long loadExceptionTimestamp;
private IOException loadException;
private volatile T manifest;
private volatile long manifestLoadTimestamp;
public ManifestFetcher(ManifestParser<T> parser, String contentId, String manifestUrl,
String userAgent) {
this(parser, contentId, manifestUrl, userAgent, null, null);
}
/**
* @param callback The callback to provide with the parsed manifest (or error).
* @param timeoutMillis The timeout in milliseconds for the connection used to load the data.
* @param parser A parser to parse the loaded manifest data.
* @param contentId The content id of the content being loaded. May be null.
* @param manifestUrl The manifest location.
* @param userAgent The User-Agent string that should be used.
*/
public ManifestFetcher(ManifestCallback<T> callback, int timeoutMillis) {
this.callback = callback;
this.timeoutMillis = timeoutMillis;
public ManifestFetcher(ManifestParser<T> parser, String contentId, String manifestUrl,
String userAgent, Handler eventHandler, EventListener eventListener) {
this.parser = parser;
this.contentId = contentId;
this.manifestUrl = manifestUrl;
this.userAgent = userAgent;
this.eventHandler = eventHandler;
this.eventListener = eventListener;
}
/**
 * Updates the manifest location.
 *
 * <p>Loads started after this call fetch the manifest from the new location.
 *
 * @param manifestUrl The manifest location.
 */
public void updateManifestUrl(String manifestUrl) {
  this.manifestUrl = manifestUrl;
}
/**
 * Performs a single manifest load.
 *
 * @param callbackLooper The looper associated with the thread on which the callback should be
 *     invoked.
 * @param callback The callback to receive the result.
 */
public void singleLoad(Looper callbackLooper, final ManifestCallback<T> callback) {
  // The helper owns a dedicated single-use Loader, which it releases when the load terminates.
  new SingleFetchHelper(callbackLooper, callback).startLoading();
}
/**
 * Gets the most recently loaded manifest.
 *
 * @return The most recently loaded manifest, or null if no manifest has loaded.
 */
public T getManifest() {
  return manifest;
}
/**
 * Gets the value of {@link SystemClock#elapsedRealtime()} when the last load completed.
 *
 * @return The value of {@link SystemClock#elapsedRealtime()} when the last load completed, or 0
 *     if no load has completed yet.
 */
public long getManifestLoadTimestamp() {
  return manifestLoadTimestamp;
}
/**
 * Gets the error that affected the most recent attempt to load the manifest, or null if the
 * most recent attempt was successful.
 *
 * @return The error, or null if the most recent attempt was successful.
 */
public IOException getError() {
  // A single failure is not reported; the error only surfaces once a retry has also failed.
  return loadExceptionCount > 1 ? loadException : null;
}
/**
 * Enables refresh functionality.
 *
 * <p>Calls are reference counted; the failure state is reset only on the first enable.
 */
public void enable() {
  boolean firstEnable = enabledCount == 0;
  enabledCount++;
  if (firstEnable) {
    // Transitioning from disabled to enabled: forget any previous failure state.
    loadExceptionCount = 0;
    loadException = null;
  }
}
/**
 * Disables refresh functionality.
 *
 * <p>Calls are reference counted; the loader is released only when the last enabler disables.
 */
public void disable() {
  enabledCount--;
  if (enabledCount == 0 && loader != null) {
    // The last consumer has gone away; tear down the loader so no further refreshes occur.
    loader.release();
    loader = null;
  }
}
/**
 * Should be invoked repeatedly by callers who require an updated manifest.
 *
 * <p>A no-op while a previous failure is still within its retry blackout period, or while a
 * refresh is already in flight.
 */
public void requestRefresh() {
  if (loadException != null) {
    long earliestRetryTime = loadExceptionTimestamp + getRetryDelayMillis(loadExceptionCount);
    if (SystemClock.elapsedRealtime() < earliestRetryTime) {
      // The previous load failed, and it's too soon to try again.
      return;
    }
  }
  if (loader == null) {
    loader = new Loader("manifestLoader");
  }
  if (loader.isLoading()) {
    // A refresh is already in flight.
    return;
  }
  currentLoadable = new ManifestLoadable();
  loader.startLoading(currentLoadable, this);
  notifyManifestRefreshStarted();
}
@Override
protected final T doInBackground(String... data) {
try {
contentId = data.length > 1 ? data[1] : null;
String urlString = data[0];
String inputEncoding = null;
public void onLoadCompleted(Loadable loadable) {
  if (currentLoadable != loadable) {
    // Stale event from a load that has since been superseded; ignore it.
    return;
  }
  // Publish the freshly loaded manifest and the time at which the load completed.
  manifest = currentLoadable.result;
  manifestLoadTimestamp = SystemClock.elapsedRealtime();
  // A successful load clears any accumulated failure state.
  loadExceptionCount = 0;
  loadException = null;
  notifyManifestRefreshed();
}
@Override
public void onLoadCanceled(Loadable loadable) {
  // Do nothing. A subsequent requestRefresh() will start a new load if one is needed.
}
@Override
public void onLoadError(Loadable loadable, IOException exception) {
  if (loadable != currentLoadable) {
    // Stale event from a load that has since been superseded; ignore it.
    return;
  }
  // Record the failure so requestRefresh() backs off before retrying.
  loadExceptionCount++;
  loadExceptionTimestamp = SystemClock.elapsedRealtime();
  loadException = new IOException(exception);
  notifyManifestError(loadException);
}
/**
 * Returns the blackout period to observe before retrying after a failed load: one second per
 * prior consecutive error, capped at five seconds.
 */
private long getRetryDelayMillis(long errorCount) {
  long delayMillis = (errorCount - 1) * 1000;
  return delayMillis < 5000 ? delayMillis : 5000;
}
/** Posts an onManifestRefreshStarted() event to the listener, if one is configured. */
private void notifyManifestRefreshStarted() {
  if (eventHandler == null || eventListener == null) {
    return;
  }
  eventHandler.post(new Runnable() {
    @Override
    public void run() {
      eventListener.onManifestRefreshStarted();
    }
  });
}
/** Posts an onManifestRefreshed() event to the listener, if one is configured. */
private void notifyManifestRefreshed() {
  if (eventHandler == null || eventListener == null) {
    return;
  }
  eventHandler.post(new Runnable() {
    @Override
    public void run() {
      eventListener.onManifestRefreshed();
    }
  });
}
/** Posts an onManifestError(e) event to the listener, if one is configured. */
private void notifyManifestError(final IOException e) {
  if (eventHandler == null || eventListener == null) {
    return;
  }
  eventHandler.post(new Runnable() {
    @Override
    public void run() {
      eventListener.onManifestError(e);
    }
  });
}
/**
 * Performs a single one-shot manifest load on a dedicated single-use {@link Loader}, delivering
 * the outcome to a wrapped {@link ManifestCallback} and releasing the loader afterwards.
 */
private class SingleFetchHelper implements Loader.Callback {

  // Looper for the thread on which the wrapped callback is invoked.
  private final Looper callbackLooper;
  private final ManifestCallback<T> wrappedCallback;
  // Single-use loader owned by this helper; released once the load terminates, whatever the
  // outcome (see the finally blocks below).
  private final Loader singleUseLoader;
  private final ManifestLoadable singleUseLoadable;

  public SingleFetchHelper(Looper callbackLooper, ManifestCallback<T> wrappedCallback) {
    this.callbackLooper = callbackLooper;
    this.wrappedCallback = wrappedCallback;
    singleUseLoader = new Loader("manifestLoader:single");
    singleUseLoadable = new ManifestLoadable();
  }

  /** Starts the single load. */
  public void startLoading() {
    singleUseLoader.startLoading(callbackLooper, singleUseLoadable, this);
  }

  @Override
  public void onLoadCompleted(Loadable loadable) {
    try {
      // Also publish the result to the enclosing fetcher before invoking the callback.
      manifest = singleUseLoadable.result;
      manifestLoadTimestamp = SystemClock.elapsedRealtime();
      wrappedCallback.onManifest(contentId, singleUseLoadable.result);
    } finally {
      releaseLoader();
    }
  }

  @Override
  public void onLoadCanceled(Loadable loadable) {
    // This shouldn't ever happen, but handle it anyway.
    try {
      IOException exception = new IOException("Load cancelled", new CancellationException());
      wrappedCallback.onManifestError(contentId, exception);
    } finally {
      releaseLoader();
    }
  }

  @Override
  public void onLoadError(Loadable loadable, IOException exception) {
    try {
      wrappedCallback.onManifestError(contentId, exception);
    } finally {
      releaseLoader();
    }
  }

  // Releases the single-use loader; reached exactly once via the finally blocks above.
  private void releaseLoader() {
    singleUseLoader.release();
  }
}
private class ManifestLoadable implements Loadable {
private static final int TIMEOUT_MILLIS = 10000;
/* package */ volatile T result;
private volatile boolean isCanceled;
@Override
public void cancelLoad() {
  // Nothing in-flight is actually interrupted; we only record the request so that
  // isLoadCanceled() reports the correct value.
  isCanceled = true;
}
/** Returns whether {@link #cancelLoad()} has been invoked. */
@Override
public boolean isLoadCanceled() {
  return isCanceled;
}
@Override
public void load() throws IOException, InterruptedException {
String inputEncoding;
InputStream inputStream = null;
try {
Uri baseUrl = Util.parseBaseUri(urlString);
HttpURLConnection connection = configureHttpConnection(new URL(urlString));
URLConnection connection = configureConnection(new URL(manifestUrl));
inputStream = connection.getInputStream();
inputEncoding = connection.getContentEncoding();
return parse(inputStream, inputEncoding, contentId, baseUrl);
result = parser.parse(inputStream, inputEncoding, contentId,
Util.parseBaseUri(connection.getURL().toString()));
} finally {
if (inputStream != null) {
inputStream.close();
}
}
} catch (Exception e) {
exception = e;
return null;
}
}
@Override
protected final void onPostExecute(T manifest) {
if (exception != null) {
callback.onManifestError(contentId, exception);
} else {
callback.onManifest(contentId, manifest);
/**
 * Opens and connects a {@link URLConnection} to the given URL, configured with fixed timeouts
 * and the fetcher's User-Agent string.
 */
private URLConnection configureConnection(URL url) throws IOException {
  URLConnection urlConnection = url.openConnection();
  // All configuration must happen before connect(); request properties cannot be set on an
  // already-connected URLConnection.
  urlConnection.setDoOutput(false);
  urlConnection.setConnectTimeout(TIMEOUT_MILLIS);
  urlConnection.setReadTimeout(TIMEOUT_MILLIS);
  urlConnection.setRequestProperty("User-Agent", userAgent);
  urlConnection.connect();
  return urlConnection;
}
}
/**
* Reads the {@link InputStream} and parses it into a manifest. Invoked from the
* {@link AsyncTask}'s background thread.
*
* @param stream The input stream to read.
* @param inputEncoding The encoding of the input stream.
* @param contentId The content id of the media.
* @param baseUrl Required where the manifest contains urls that are relative to a base url. May
* be null where this is not the case.
* @throws IOException If an error occurred loading the data.
* @throws ParserException If an error occurred parsing the loaded data.
*/
protected abstract T parse(InputStream stream, String inputEncoding, String contentId,
Uri baseUrl) throws IOException, ParserException;
private HttpURLConnection configureHttpConnection(URL url) throws IOException {
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setConnectTimeout(timeoutMillis);
connection.setReadTimeout(timeoutMillis);
connection.setDoOutput(false);
connection.connect();
return connection;
}
}

Some files were not shown because too many files have changed in this diff. Show More