Video format selection.

Making Representation and TrackElement extend Format will simplify
this further (TBC whether this is a good idea!).

Issue: #393
This commit is contained in:
Oliver Woodman 2015-05-01 20:27:11 +01:00
parent fafcd79e1b
commit 9b112cf94d
7 changed files with 311 additions and 50 deletions

View File

@ -222,10 +222,10 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
String userAgent = Util.getUserAgent(this, "ExoPlayerDemo");
switch (contentType) {
case DemoUtil.TYPE_SS:
return new SmoothStreamingRendererBuilder(userAgent, contentUri.toString(),
return new SmoothStreamingRendererBuilder(this, userAgent, contentUri.toString(),
new SmoothStreamingTestMediaDrmCallback(), debugTextView);
case DemoUtil.TYPE_DASH:
return new DashRendererBuilder(userAgent, contentUri.toString(),
return new DashRendererBuilder(this, userAgent, contentUri.toString(),
new WidevineTestMediaDrmCallback(contentId), debugTextView, audioCapabilities);
case DemoUtil.TYPE_HLS:
return new HlsRendererBuilder(userAgent, contentUri.toString(), debugTextView);

View File

@ -19,7 +19,6 @@ import com.google.android.exoplayer.C;
import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.SampleSource;
@ -31,6 +30,7 @@ import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.dash.DashChunkSource;
import com.google.android.exoplayer.dash.mpd.AdaptationSet;
import com.google.android.exoplayer.dash.mpd.MediaPresentationDescription;
@ -56,10 +56,10 @@ import com.google.android.exoplayer.upstream.DefaultUriDataSource;
import com.google.android.exoplayer.upstream.HttpDataSource;
import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.ManifestFetcher.ManifestCallback;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.content.Context;
import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.UnsupportedSchemeException;
@ -102,6 +102,7 @@ public class DashRendererBuilder implements RendererBuilder,
private static final String[] PASSTHROUGH_CODECS_PRIORITY =
new String[] {"ec-3", "ac-3"};
private final Context context;
private final String userAgent;
private final String url;
private final MediaDrmCallback drmCallback;
@ -116,8 +117,9 @@ public class DashRendererBuilder implements RendererBuilder,
private MediaPresentationDescription manifest;
private long elapsedRealtimeOffset;
public DashRendererBuilder(String userAgent, String url, MediaDrmCallback drmCallback,
TextView debugTextView, AudioCapabilities audioCapabilities) {
public DashRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback, TextView debugTextView, AudioCapabilities audioCapabilities) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.drmCallback = drmCallback;
@ -214,39 +216,25 @@ public class DashRendererBuilder implements RendererBuilder,
}
// Determine which video representations we should use for playback.
ArrayList<Integer> videoRepresentationIndexList = new ArrayList<Integer>();
int[] videoRepresentationIndices = null;
if (videoAdaptationSet != null) {
int maxDecodableFrameSize;
Format[] formats = getFormats(videoAdaptationSet.representations);
try {
maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
videoRepresentationIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
context, formats, null, filterHdContent);
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
return;
}
List<Representation> videoRepresentations = videoAdaptationSet.representations;
for (int i = 0; i < videoRepresentations.size(); i++) {
Format format = videoRepresentations.get(i).format;
if (filterHdContent && (format.width >= 1280 || format.height >= 720)) {
// Filtering HD content
} else if (format.width * format.height > maxDecodableFrameSize) {
// Filtering stream that device cannot play
} else if (!format.mimeType.equals(MimeTypes.VIDEO_MP4)
&& !format.mimeType.equals(MimeTypes.VIDEO_WEBM)) {
// Filtering unsupported mime type
} else {
videoRepresentationIndexList.add(i);
}
}
}
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
final TrackRenderer debugRenderer;
if (videoRepresentationIndexList.isEmpty()) {
if (videoRepresentationIndices == null || videoRepresentationIndices.length == 0) {
videoRenderer = null;
debugRenderer = null;
} else {
int[] videoRepresentationIndices = Util.toArray(videoRepresentationIndexList);
DataSource videoDataSource = new DefaultUriDataSource(userAgent, bandwidthMeter);
ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, videoAdaptationSetIndex,
videoRepresentationIndices, videoDataSource, new AdaptiveEvaluator(bandwidthMeter),
@ -375,6 +363,14 @@ public class DashRendererBuilder implements RendererBuilder,
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
}
/**
 * Extracts the {@link Format} of each {@link Representation} into a new array, preserving the
 * order of the input list.
 *
 * @param representations The representations whose formats are required.
 * @return An array containing the format of each representation, in order.
 */
private static Format[] getFormats(List<Representation> representations) {
  int representationCount = representations.size();
  Format[] formats = new Format[representationCount];
  for (int i = 0; i < representationCount; i++) {
    formats[i] = representations.get(i).format;
  }
  return formats;
}
@TargetApi(18)
private static class V18Compat {

View File

@ -18,15 +18,16 @@ package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.DefaultLoadControl;
import com.google.android.exoplayer.LoadControl;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecUtil;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.chunk.ChunkSampleSource;
import com.google.android.exoplayer.chunk.ChunkSource;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.chunk.FormatEvaluator;
import com.google.android.exoplayer.chunk.FormatEvaluator.AdaptiveEvaluator;
import com.google.android.exoplayer.chunk.MultiTrackChunkSource;
import com.google.android.exoplayer.chunk.VideoFormatSelectorUtil;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallback;
import com.google.android.exoplayer.drm.DrmSessionManager;
@ -48,13 +49,13 @@ import com.google.android.exoplayer.util.ManifestFetcher;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.content.Context;
import android.media.MediaCodec;
import android.media.UnsupportedSchemeException;
import android.os.Handler;
import android.widget.TextView;
import java.io.IOException;
import java.util.ArrayList;
import java.util.UUID;
/**
@ -69,6 +70,7 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
private static final int TEXT_BUFFER_SEGMENTS = 2;
private static final int LIVE_EDGE_LATENCY_MS = 30000;
private final Context context;
private final String userAgent;
private final String url;
private final MediaDrmCallback drmCallback;
@ -78,8 +80,9 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
private RendererBuilderCallback callback;
private ManifestFetcher<SmoothStreamingManifest> manifestFetcher;
public SmoothStreamingRendererBuilder(String userAgent, String url, MediaDrmCallback drmCallback,
TextView debugTextView) {
public SmoothStreamingRendererBuilder(Context context, String userAgent, String url,
MediaDrmCallback drmCallback, TextView debugTextView) {
this.context = context;
this.userAgent = userAgent;
this.url = url;
this.drmCallback = drmCallback;
@ -125,17 +128,9 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
}
// Obtain stream elements for playback.
int maxDecodableFrameSize;
try {
maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
return;
}
int audioStreamElementCount = 0;
int textStreamElementCount = 0;
int videoStreamElementIndex = -1;
ArrayList<Integer> videoTrackIndexList = new ArrayList<Integer>();
for (int i = 0; i < manifest.streamElements.length; i++) {
if (manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
audioStreamElementCount++;
@ -144,26 +139,29 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
} else if (videoStreamElementIndex == -1
&& manifest.streamElements[i].type == StreamElement.TYPE_VIDEO) {
videoStreamElementIndex = i;
StreamElement streamElement = manifest.streamElements[i];
for (int j = 0; j < streamElement.tracks.length; j++) {
TrackElement trackElement = streamElement.tracks[j];
if (trackElement.maxWidth * trackElement.maxHeight <= maxDecodableFrameSize) {
videoTrackIndexList.add(j);
} else {
// The device isn't capable of playing this stream.
}
}
}
}
// Determine which video tracks we should use for playback.
int[] videoTrackIndices = null;
if (videoStreamElementIndex != -1) {
Format[] formats = getFormats(manifest.streamElements[videoStreamElementIndex].tracks);
try {
videoTrackIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(context,
formats, null, false);
} catch (DecoderQueryException e) {
callback.onRenderersError(e);
return;
}
}
// Build the video renderer.
final MediaCodecVideoTrackRenderer videoRenderer;
final TrackRenderer debugRenderer;
if (videoTrackIndexList.isEmpty()) {
if (videoTrackIndices == null || videoTrackIndices.length == 0) {
videoRenderer = null;
debugRenderer = null;
} else {
int[] videoTrackIndices = Util.toArray(videoTrackIndexList);
DataSource videoDataSource = new DefaultUriDataSource(userAgent, bandwidthMeter);
ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
videoStreamElementIndex, videoTrackIndices, videoDataSource,
@ -256,6 +254,17 @@ public class SmoothStreamingRendererBuilder implements RendererBuilder,
callback.onRenderers(trackNames, multiTrackChunkSources, renderers);
}
/**
 * Builds a {@link Format} for each {@link TrackElement}, using the element's index within the
 * array as the format id.
 * <p>
 * NOTE(review): the {@code -1} argument passed to the {@link Format} constructor appears to mark
 * an unknown value for one of the format's fields — confirm against the Format constructor's
 * parameter order.
 *
 * @param trackElements The track elements to convert.
 * @return An array containing a format for each track element, in order.
 */
private static Format[] getFormats(TrackElement[] trackElements) {
  int trackCount = trackElements.length;
  Format[] formats = new Format[trackCount];
  for (int index = 0; index < trackCount; index++) {
    TrackElement track = trackElements[index];
    formats[index] = new Format(String.valueOf(index), track.mimeType, track.maxWidth,
        track.maxHeight, -1, track.numChannels, track.sampleRate, track.bitrate);
  }
  return formats;
}
@TargetApi(18)
private static class V18Compat {

View File

@ -0,0 +1,214 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.chunk;
import com.google.android.exoplayer.MediaCodecUtil;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.util.Util;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Point;
import android.view.Display;
import android.view.WindowManager;
import java.util.ArrayList;
/**
 * Selects from possible video formats.
 */
public final class VideoFormatSelectorUtil {

  /**
   * If a dimension (i.e. width or height) of a video is greater or equal to this fraction of the
   * corresponding viewport dimension, then the video is considered as filling the viewport (in
   * that dimension).
   */
  private static final float FRACTION_TO_CONSIDER_FULLSCREEN = 0.98f;

  /**
   * Chooses a suitable subset from a number of video formats, to be rendered on the device's
   * default display.
   * <p>
   * Equivalent to calling {@link #selectVideoFormats} with the default display's size as the
   * viewport and {@code orientationMayChange} set to true.
   *
   * @param context A context.
   * @param formats The formats from which to select.
   * @param allowedContainerMimeTypes An array of allowed container mime types. Null allows all
   *     mime types.
   * @param filterHdFormats True to filter HD formats. False otherwise.
   * @return An array holding the indices of the selected formats.
   * @throws DecoderQueryException If there was an error querying the device's decoders.
   */
  public static int[] selectVideoFormatsForDefaultDisplay(Context context, Format[] formats,
      String[] allowedContainerMimeTypes, boolean filterHdFormats) throws DecoderQueryException {
    WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
    Display display = windowManager.getDefaultDisplay();
    Point displaySize = getDisplaySize(display);
    // Use the full physical display as the viewport, and assume the device may rotate during
    // playback.
    return selectVideoFormats(formats, allowedContainerMimeTypes, filterHdFormats, true,
        displaySize.x, displaySize.y);
  }

  /**
   * Chooses a suitable subset from a number of video formats.
   * <p>
   * A format is filtered (i.e. not selected) if:
   * <ul>
   * <li>{@code allowedContainerMimeTypes} is non-null and the format does not have one of the
   * permitted mime types.
   * <li>{@code filterHdFormats} is true and the format is HD.
   * <li>It's determined that the video decoder isn't powerful enough to decode the format.
   * <li>There exists another format of lower resolution whose resolution exceeds the maximum size
   * in pixels that the video can be rendered within the viewport.
   * </ul>
   *
   * @param formats The formats from which to select.
   * @param allowedContainerMimeTypes An array of allowed container mime types. Null allows all
   *     mime types.
   * @param filterHdFormats True to filter HD formats. False otherwise.
   * @param orientationMayChange True if the video's orientation may change with respect to the
   *     viewport during playback.
   * @param viewportWidth The width in pixels of the viewport within which the video will be
   *     displayed. If the viewport size may change, this should be set to the maximum possible
   *     width.
   * @param viewportHeight The height in pixels of the viewport within which the video will be
   *     displayed. If the viewport size may change, this should be set to the maximum possible
   *     height.
   * @return An array holding the indices of the selected formats.
   * @throws DecoderQueryException If there was an error querying the device's decoders.
   */
  public static int[] selectVideoFormats(Format[] formats, String[] allowedContainerMimeTypes,
      boolean filterHdFormats, boolean orientationMayChange, int viewportWidth, int viewportHeight)
      throws DecoderQueryException {
    int maxVideoPixelsToRetain = Integer.MAX_VALUE;
    ArrayList<Integer> selectedIndexList = new ArrayList<Integer>();
    int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
    // First pass to filter out formats that individually fail to meet the selection criteria.
    for (int i = 0; i < formats.length; i++) {
      Format format = formats[i];
      if (isFormatPlayable(format, allowedContainerMimeTypes, filterHdFormats,
          maxDecodableFrameSize)) {
        // Select the format for now. It may still be filtered in the second pass below.
        selectedIndexList.add(i);
        // Keep track of the number of pixels of the selected format whose resolution is the
        // smallest to exceed the maximum size at which it can be displayed within the viewport.
        // We'll discard formats of higher resolution in a second pass.
        // Formats with unknown dimensions (-1) are skipped here and never constrain, nor are
        // constrained by, the pixel-count cap.
        if (format.width != -1 && format.height != -1) {
          Point maxVideoSizeInViewport = getMaxVideoSizeInViewport(orientationMayChange,
              viewportWidth, viewportHeight, format.width, format.height);
          int videoPixels = format.width * format.height;
          // A format counts as "filling the viewport" if both dimensions reach at least
          // FRACTION_TO_CONSIDER_FULLSCREEN of the maximum renderable size.
          if (format.width >= (int) (maxVideoSizeInViewport.x * FRACTION_TO_CONSIDER_FULLSCREEN)
              && format.height >= (int) (maxVideoSizeInViewport.y * FRACTION_TO_CONSIDER_FULLSCREEN)
              && videoPixels < maxVideoPixelsToRetain) {
            maxVideoPixelsToRetain = videoPixels;
          }
        }
      }
    }
    // Second pass to filter out formats that exceed maxVideoPixelsToRetain. These formats have
    // unnecessarily high resolution given the size at which the video will be displayed within
    // the viewport. Iterate backwards so that removal by index doesn't shift unvisited entries.
    for (int i = selectedIndexList.size() - 1; i >= 0; i--) {
      Format format = formats[selectedIndexList.get(i)];
      int videoPixels = format.width * format.height;
      if (format.width != -1 && format.height != -1 && videoPixels > maxVideoPixelsToRetain) {
        selectedIndexList.remove(i);
      }
    }
    return Util.toArray(selectedIndexList);
  }

  /**
   * Determines whether an individual format is playable, given an array of allowed container
   * types, whether HD formats should be filtered and a maximum decodable frame size in pixels.
   */
  private static boolean isFormatPlayable(Format format, String[] allowedContainerMimeTypes,
      boolean filterHdFormats, int maxDecodableFrameSize) {
    if (allowedContainerMimeTypes != null
        && !Util.contains(allowedContainerMimeTypes, format.mimeType)) {
      // Filtering format based on its container mime type.
      return false;
    }
    if (filterHdFormats && (format.width >= 1280 || format.height >= 720)) {
      // Filtering format because it's HD (720p or wider than 1280 in either dimension).
      return false;
    }
    if (format.width != -1 && format.height != -1) {
      // TODO: Use MediaCodecUtil.isSizeAndRateSupportedV21 on API levels >= 21 if we know the
      // mimeType of the media samples within the container. Remove the assumption that we're
      // dealing with H.264.
      if (format.width * format.height > maxDecodableFrameSize) {
        // Filtering stream that device cannot play
        return false;
      }
    }
    return true;
  }

  /**
   * Given viewport dimensions and video dimensions, computes the maximum size of the video as it
   * will be rendered to fit inside of the viewport.
   */
  private static Point getMaxVideoSizeInViewport(boolean orientationMayChange, int viewportWidth,
      int viewportHeight, int videoWidth, int videoHeight) {
    if (orientationMayChange && (videoWidth > videoHeight) != (viewportWidth > viewportHeight)) {
      // Rotation is allowed, and the video will be larger in the rotated viewport.
      int tempViewportWidth = viewportWidth;
      viewportWidth = viewportHeight;
      viewportHeight = tempViewportWidth;
    }
    if (videoWidth * viewportHeight >= videoHeight * viewportWidth) {
      // Horizontal letter-boxing along top and bottom.
      return new Point(viewportWidth, Util.ceilDivide(viewportWidth * videoHeight, videoWidth));
    } else {
      // Vertical letter-boxing along edges.
      return new Point(Util.ceilDivide(viewportHeight * videoWidth, videoHeight), viewportHeight);
    }
  }

  /**
   * Obtains the size of a display, dispatching to the most capable API available at the device's
   * API level.
   */
  private static Point getDisplaySize(Display display) {
    Point displaySize = new Point();
    if (Util.SDK_INT >= 17) {
      getDisplaySizeV17(display, displaySize);
    } else if (Util.SDK_INT >= 16) {
      getDisplaySizeV16(display, displaySize);
    } else {
      getDisplaySizeV9(display, displaySize);
    }
    return displaySize;
  }

  @TargetApi(17)
  private static void getDisplaySizeV17(Display display, Point outSize) {
    display.getRealSize(outSize);
  }

  @TargetApi(16)
  private static void getDisplaySizeV16(Display display, Point outSize) {
    display.getSize(outSize);
  }

  @SuppressWarnings("deprecation")
  private static void getDisplaySizeV9(Display display, Point outSize) {
    outSize.x = display.getWidth();
    outSize.y = display.getHeight();
  }

  // Static utility class: not instantiable.
  private VideoFormatSelectorUtil() {}

}

View File

@ -353,7 +353,7 @@ public abstract class SegmentBase {
return DashSegmentIndex.INDEX_UNBOUNDED;
} else {
long durationMs = (duration * 1000) / timescale;
return startNumber + (int) ((periodDurationMs + durationMs - 1) / durationMs) - 1;
return startNumber + (int) Util.ceilDivide(periodDurationMs, durationMs) - 1;
}
}

View File

@ -16,6 +16,7 @@
package com.google.android.exoplayer.upstream;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
import java.util.Arrays;
@ -64,7 +65,7 @@ public final class BufferPool implements Allocator {
@Override
public synchronized void trim(int targetSize) {
int targetBufferCount = (targetSize + bufferLength - 1) / bufferLength;
int targetBufferCount = Util.ceilDivide(targetSize, bufferLength);
int targetRecycledBufferCount = Math.max(0, targetBufferCount - allocatedCount);
if (targetRecycledBufferCount < recycledCount) {
Arrays.fill(recycledBuffers, targetRecycledBufferCount, recycledCount, null);

View File

@ -93,6 +93,25 @@ public final class Util {
return o1 == null ? o2 == null : o1.equals(o2);
}
/**
 * Tests whether an {@code items} array contains an object equal to {@code item}, according to
 * {@link Object#equals(Object)}.
 * <p>
 * If {@code item} is null then true is returned if and only if {@code items} contains null.
 *
 * @param items The array of items to search.
 * @param item The item to search for.
 * @return True if the array contains an object equal to the item being searched for.
 */
public static boolean contains(Object[] items, Object item) {
  for (Object candidate : items) {
    if (Util.areEqual(candidate, item)) {
      return true;
    }
  }
  return false;
}
/**
* Instantiates a new single threaded executor whose thread has the specified name.
*
@ -159,6 +178,28 @@ public final class Util {
return text == null ? null : text.toLowerCase(Locale.US);
}
/**
 * Divides a {@code numerator} by a {@code denominator}, returning the ceiled result.
 * <p>
 * Unlike the naive {@code (numerator + denominator - 1) / denominator}, this implementation does
 * not overflow when {@code numerator} is close to {@link Integer#MAX_VALUE}, and it also returns
 * the mathematically ceiled result when the operands have differing signs.
 *
 * @param numerator The numerator to divide.
 * @param denominator The denominator to divide by. Must be non-zero.
 * @return The ceiled result of the division.
 */
public static int ceilDivide(int numerator, int denominator) {
  // Java integer division truncates toward zero, so the truncated quotient only needs rounding
  // up when the division was inexact and the true quotient is positive (operands share a sign).
  int quotient = numerator / denominator;
  if (numerator % denominator != 0 && (numerator ^ denominator) >= 0) {
    quotient++;
  }
  return quotient;
}
/**
 * Divides a {@code numerator} by a {@code denominator}, returning the ceiled result.
 * <p>
 * Unlike the naive {@code (numerator + denominator - 1) / denominator}, this implementation does
 * not overflow when {@code numerator} is close to {@link Long#MAX_VALUE}, and it also returns
 * the mathematically ceiled result when the operands have differing signs.
 *
 * @param numerator The numerator to divide.
 * @param denominator The denominator to divide by. Must be non-zero.
 * @return The ceiled result of the division.
 */
public static long ceilDivide(long numerator, long denominator) {
  // Java integer division truncates toward zero, so the truncated quotient only needs rounding
  // up when the division was inexact and the true quotient is positive (operands share a sign).
  long quotient = numerator / denominator;
  if (numerator % denominator != 0 && (numerator ^ denominator) >= 0) {
    quotient++;
  }
  return quotient;
}
/**
* Returns the index of the largest value in an array that is less than (or optionally equal to)
* a specified key.