Add Mp4SampleExtractor, for reading unfragmented MP4 files.

Andrew Lewis 2015-03-13 18:10:37 +00:00
parent 6d8c4dd416
commit 5e4a35fa7e
9 changed files with 1837 additions and 11 deletions

DemoUtil.java

@@ -48,6 +48,7 @@ public class DemoUtil {
public static final int TYPE_SS = 1;
public static final int TYPE_OTHER = 2;
public static final int TYPE_HLS = 3;
public static final int TYPE_MP4 = 4;
private static final CookieManager defaultCookieManager;

PlayerActivity.java

@@ -24,6 +24,7 @@ import com.google.android.exoplayer.demo.player.DefaultRendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.HlsRendererBuilder;
import com.google.android.exoplayer.demo.player.Mp4RendererBuilder;
import com.google.android.exoplayer.demo.player.SmoothStreamingRendererBuilder;
import com.google.android.exoplayer.demo.player.UnsupportedDrmException;
import com.google.android.exoplayer.metadata.GeobMetadata;
@@ -215,6 +216,8 @@ public class PlayerActivity extends Activity implements SurfaceHolder.Callback,
new WidevineTestMediaDrmCallback(contentId), debugTextView, audioCapabilities);
case DemoUtil.TYPE_HLS:
return new HlsRendererBuilder(userAgent, contentUri.toString());
case DemoUtil.TYPE_MP4:
return new Mp4RendererBuilder(contentUri, debugTextView);
default:
return new DefaultRendererBuilder(this, contentUri, debugTextView);
}

Samples.java

@@ -135,6 +135,12 @@ import java.util.Locale;
new Sample("Apple AAC 10s", "https://devimages.apple.com.edgekey.net/"
+ "streaming/examples/bipbop_4x3/gear0/fileSequence0.aac",
DemoUtil.TYPE_OTHER),
new Sample("Big Buck Bunny (MP4)",
"http://redirector.c.youtube.com/videoplayback?id=604ed5ce52eda7ee&itag=22&source=youtube"
+ "&sparams=ip,ipbits,expire&ip=0.0.0.0&ipbits=0&expire=19000000000&signature="
+ "2E853B992F6CAB9D28CA3BEBD84A6F26709A8A55.94344B0D8BA83A7417AAD24DACC8C71A9A878ECE"
+ "&key=ik0",
DemoUtil.TYPE_MP4),
};
private Samples() {}

Mp4RendererBuilder.java

@@ -0,0 +1,69 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.demo.player;
import com.google.android.exoplayer.MediaCodecAudioTrackRenderer;
import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilderCallback;
import com.google.android.exoplayer.source.DefaultSampleSource;
import com.google.android.exoplayer.source.Mp4SampleExtractor;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.UriDataSource;
import android.media.MediaCodec;
import android.net.Uri;
import android.widget.TextView;
/**
* A {@link RendererBuilder} for streams that can be read using {@link Mp4SampleExtractor}.
*/
public class Mp4RendererBuilder implements RendererBuilder {
private final Uri uri;
private final TextView debugTextView;
public Mp4RendererBuilder(Uri uri, TextView debugTextView) {
this.uri = uri;
this.debugTextView = debugTextView;
}
@Override
public void buildRenderers(DemoPlayer player, RendererBuilderCallback callback) {
// Build the video and audio renderers.
DefaultSampleSource sampleSource = new DefaultSampleSource(
new Mp4SampleExtractor(new UriDataSource("exoplayer", null), new DataSpec(uri)), 2);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, player.getMainHandler(),
player, 50);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
null, true, player.getMainHandler(), player);
// Build the debug renderer.
TrackRenderer debugRenderer = debugTextView != null
? new DebugTrackRenderer(debugTextView, videoRenderer)
: null;
// Invoke the callback.
TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
callback.onRenderers(null, null, renderers);
}
}
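
For context, the same wiring can also be driven directly against the core playback API, outside the demo's RendererBuilder plumbing. The fragment below is a minimal sketch, not part of this commit: it reuses the DefaultSampleSource/Mp4SampleExtractor construction shown above and assumes the standard ExoPlayer 1.x calls (ExoPlayer.Factory.newInstance, prepare, sendMessage with MSG_SET_SURFACE) and the simple renderer constructors; uri and surface are placeholders supplied by the caller.

// Sketch only (assumes the ExoPlayer 1.x playback API; uri and surface are placeholders).
DefaultSampleSource sampleSource = new DefaultSampleSource(
    new Mp4SampleExtractor(new UriDataSource("exoplayer", null), new DataSpec(uri)), 2);
MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(
    sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
ExoPlayer player = ExoPlayer.Factory.newInstance(2 /* rendererCount */);
player.prepare(videoRenderer, audioRenderer);
player.sendMessage(videoRenderer, MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);
player.setPlayWhenReady(true);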

Mp4SampleExtractor.java

@@ -0,0 +1,740 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.source;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.drm.DrmInitData;
import com.google.android.exoplayer.mp4.Atom;
import com.google.android.exoplayer.mp4.Atom.ContainerAtom;
import com.google.android.exoplayer.mp4.CommonMp4AtomParsers;
import com.google.android.exoplayer.mp4.Mp4TrackSampleTable;
import com.google.android.exoplayer.mp4.Mp4Util;
import com.google.android.exoplayer.mp4.Track;
import com.google.android.exoplayer.upstream.BufferPool;
import com.google.android.exoplayer.upstream.BufferedNonBlockingInputStream;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSourceStream;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.upstream.Loader;
import com.google.android.exoplayer.upstream.Loader.Loadable;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.ParsableByteArray;
import com.google.android.exoplayer.util.Util;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Stack;
/**
* Extracts data from a {@link DataSpec} in unfragmented MP4 format (ISO 14496-12).
*/
public final class Mp4SampleExtractor implements SampleExtractor, Loader.Callback {
private static final String TAG = "Mp4SampleExtractor";
private static final String LOADER_THREAD_NAME = "Mp4SampleExtractor";
// Reading results
private static final int RESULT_NEED_MORE_DATA = 1;
private static final int RESULT_END_OF_STREAM = 2;
// Parser states
private static final int STATE_READING_ATOM_HEADER = 0;
private static final int STATE_READING_ATOM_PAYLOAD = 1;
/** Set of atom types that contain data to be parsed. */
private static final Set<Integer> LEAF_ATOM_TYPES = getAtomTypeSet(
Atom.TYPE_mdhd, Atom.TYPE_mvhd, Atom.TYPE_hdlr, Atom.TYPE_vmhd, Atom.TYPE_smhd,
Atom.TYPE_stsd, Atom.TYPE_avc1, Atom.TYPE_avcC, Atom.TYPE_mp4a, Atom.TYPE_esds,
Atom.TYPE_stts, Atom.TYPE_stss, Atom.TYPE_ctts, Atom.TYPE_stsc, Atom.TYPE_stsz,
Atom.TYPE_stco, Atom.TYPE_co64, Atom.TYPE_tkhd);
/** Set of atom types that contain other atoms that need to be parsed. */
private static final Set<Integer> CONTAINER_TYPES = getAtomTypeSet(
Atom.TYPE_moov, Atom.TYPE_trak, Atom.TYPE_mdia, Atom.TYPE_minf, Atom.TYPE_stbl);
/** Default number of times to retry loading data prior to failing. */
private static final int DEFAULT_LOADABLE_RETRY_COUNT = 3;
private final DataSource dataSource;
private final DataSpec dataSpec;
private final int readAheadAllocationSize;
private final int reloadMinimumSeekDistance;
private final int maximumTrackSampleInterval;
private final int loadRetryCount;
private final BufferPool bufferPool;
private final Loader loader;
private final ParsableByteArray atomHeader;
private final Stack<Atom.ContainerAtom> containerAtoms;
private DataSourceStream dataSourceStream;
private BufferedNonBlockingInputStream inputStream;
private long inputStreamOffset;
private long rootAtomBytesRead;
private boolean loadCompleted;
private int parserState;
private int atomBytesRead;
private int atomType;
private long atomSize;
private ParsableByteArray atomData;
private boolean prepared;
private int loadErrorCount;
private Mp4Track[] tracks;
/** The most recent exception reported by {@link #onLoadError}, or {@code null} if no error occurred. */
private IOException lastLoadError;
private long loadErrorPosition;
/** If handling a call to {@link #seekTo}, the new required stream offset, or -1 otherwise. */
private long pendingSeekPosition;
/** If the input stream is being reopened at a new position, the new offset, or -1 otherwise. */
private long pendingLoadPosition;
/**
* Creates a new sample extractor for reading {@code dataSource} and {@code dataSpec} as an
* unfragmented MP4 file with default settings.
*
* <p>The default settings read ahead by 5 MiB, allow samples at the same timestamp in different
* tracks to be separated by up to 3 MiB, and restart loading when seeking forward by >= 256 KiB.
*
* @param dataSource Data source used to read from {@code dataSpec}.
* @param dataSpec Data specification specifying what to read.
*/
public Mp4SampleExtractor(DataSource dataSource, DataSpec dataSpec) {
this(dataSource, dataSpec, 5 * 1024 * 1024, 3 * 1024 * 1024, 256 * 1024,
DEFAULT_LOADABLE_RETRY_COUNT);
}
/**
* Creates a new sample extractor for reading {@code dataSource} and {@code dataSpec} as an
* unfragmented MP4 file.
*
* @param dataSource Data source used to read from {@code dataSpec}.
* @param dataSpec Data specification specifying what to read.
* @param readAheadAllocationSize Size of the allocation that buffers the stream, in bytes. The
* value must exceed the maximum sample size, so that a sample can be read in its entirety.
* @param maximumTrackSampleInterval Size of the buffer that handles reading from any selected
* track. The value should be chosen so that the buffer is as big as the interval in bytes
* between the start of the earliest and the end of the latest sample required to render media
* from all selected tracks, at any timestamp in the data source.
* @param reloadMinimumSeekDistance Determines when {@code dataSource} is reopened while seeking:
* if the number of bytes between the current position and the new position is greater than or
* equal to this value, or the new position is before the current position, loading will
* restart. The value should be set to the number of bytes that can be loaded/consumed from an
* existing connection in the time it takes to start a new connection.
* @param loadableRetryCount The number of times to retry loading if an error occurs.
*/
public Mp4SampleExtractor(DataSource dataSource, DataSpec dataSpec, int readAheadAllocationSize,
int maximumTrackSampleInterval, int reloadMinimumSeekDistance, int loadableRetryCount) {
// TODO: Handle maximumTrackSampleInterval specified in time not bytes.
this.dataSource = Assertions.checkNotNull(dataSource);
this.dataSpec = Assertions.checkNotNull(dataSpec);
this.readAheadAllocationSize = readAheadAllocationSize;
this.maximumTrackSampleInterval = maximumTrackSampleInterval;
this.reloadMinimumSeekDistance = reloadMinimumSeekDistance;
this.loadRetryCount = loadableRetryCount;
// TODO: Implement Allocator here so it is possible to check there is only one buffer at a time.
bufferPool = new BufferPool(readAheadAllocationSize);
loader = new Loader(LOADER_THREAD_NAME);
atomHeader = new ParsableByteArray(Mp4Util.LONG_ATOM_HEADER_SIZE);
containerAtoms = new Stack<Atom.ContainerAtom>();
parserState = STATE_READING_ATOM_HEADER;
pendingLoadPosition = -1;
pendingSeekPosition = -1;
loadErrorPosition = -1;
}
@Override
public boolean prepare() throws IOException {
if (inputStream == null) {
loadFromOffset(0L);
}
if (!prepared) {
if (readHeaders() && !prepared) {
throw new IOException("moov atom not found.");
}
if (!prepared) {
maybeThrowLoadError();
}
}
return prepared;
}
@Override
public void selectTrack(int trackIndex) {
Assertions.checkState(prepared);
if (tracks[trackIndex].selected) {
return;
}
tracks[trackIndex].selected = true;
// Get the timestamp of the earliest currently-selected sample.
int earliestSampleTrackIndex = getTrackIndexOfEarliestCurrentSample();
if (earliestSampleTrackIndex == Mp4Util.NO_TRACK) {
tracks[trackIndex].sampleIndex = 0;
return;
}
if (earliestSampleTrackIndex == Mp4Util.NO_SAMPLE) {
tracks[trackIndex].sampleIndex = Mp4Util.NO_SAMPLE;
return;
}
long timestampUs = tracks[earliestSampleTrackIndex].sampleTable
.timestampsUs[tracks[earliestSampleTrackIndex].sampleIndex];
// Find the latest sync sample in the new track that has an earlier or equal timestamp.
tracks[trackIndex].sampleIndex =
tracks[trackIndex].sampleTable.getIndexOfEarlierOrEqualSynchronizationSample(timestampUs);
}
@Override
public void deselectTrack(int trackIndex) {
Assertions.checkState(prepared);
tracks[trackIndex].selected = false;
}
@Override
public long getBufferedPositionUs() {
Assertions.checkState(prepared);
if (pendingLoadPosition != -1) {
return TrackRenderer.UNKNOWN_TIME_US;
}
if (loadCompleted) {
return TrackRenderer.END_OF_TRACK_US;
}
// Get the absolute position to which there is data buffered.
long bufferedPosition =
inputStreamOffset + inputStream.getReadPosition() + inputStream.getAvailableByteCount();
// Find the timestamp of the latest sample that does not exceed the buffered position.
long latestTimestampBeforeEnd = Long.MIN_VALUE;
for (int trackIndex = 0; trackIndex < tracks.length; trackIndex++) {
if (!tracks[trackIndex].selected) {
continue;
}
Mp4TrackSampleTable sampleTable = tracks[trackIndex].sampleTable;
int sampleIndex = Util.binarySearchFloor(sampleTable.offsets, bufferedPosition, false, true);
if (sampleIndex > 0
&& sampleTable.offsets[sampleIndex] + sampleTable.sizes[sampleIndex] > bufferedPosition) {
sampleIndex--;
}
// Update the latest timestamp if this is greater.
long timestamp = sampleTable.timestampsUs[sampleIndex];
if (timestamp > latestTimestampBeforeEnd) {
latestTimestampBeforeEnd = timestamp;
}
}
return latestTimestampBeforeEnd < 0L ? C.UNKNOWN_TIME_US : latestTimestampBeforeEnd;
}
@Override
public void seekTo(long positionUs) {
Assertions.checkState(prepared);
long earliestSamplePosition = Long.MAX_VALUE;
for (int trackIndex = 0; trackIndex < tracks.length; trackIndex++) {
if (!tracks[trackIndex].selected) {
continue;
}
Mp4TrackSampleTable sampleTable = tracks[trackIndex].sampleTable;
int sampleIndex = sampleTable.getIndexOfEarlierOrEqualSynchronizationSample(positionUs);
if (sampleIndex == Mp4Util.NO_SAMPLE) {
sampleIndex = sampleTable.getIndexOfLaterOrEqualSynchronizationSample(positionUs);
}
tracks[trackIndex].sampleIndex = sampleIndex;
long offset = sampleTable.offsets[tracks[trackIndex].sampleIndex];
if (offset < earliestSamplePosition) {
earliestSamplePosition = offset;
}
}
pendingSeekPosition = earliestSamplePosition;
if (pendingLoadPosition != -1) {
loadFromOffset(earliestSamplePosition);
return;
}
inputStream.returnToMark();
long earliestOffset = inputStreamOffset + inputStream.getReadPosition();
long latestOffset = earliestOffset + inputStream.getAvailableByteCount();
if (earliestSamplePosition < earliestOffset
|| earliestSamplePosition >= latestOffset + reloadMinimumSeekDistance) {
loadFromOffset(earliestSamplePosition);
}
}
@Override
public int getTrackCount() {
Assertions.checkState(prepared);
return tracks.length;
}
@Override
public MediaFormat getMediaFormat(int track) {
Assertions.checkState(prepared);
return tracks[track].track.mediaFormat;
}
@Override
public DrmInitData getDrmInitData(int track) {
return null;
}
@Override
public int readSample(int trackIndex, SampleHolder sampleHolder) throws IOException {
Assertions.checkState(prepared);
Mp4Track track = tracks[trackIndex];
Assertions.checkState(track.selected);
int sampleIndex = track.sampleIndex;
// Check for the end of the stream.
if (sampleIndex == Mp4Util.NO_SAMPLE) {
// TODO: Should END_OF_STREAM be returned as soon as this track has no more samples, or as
// soon as no tracks have a sample (as implemented here)?
return hasSampleInAnySelectedTrack() ? SampleSource.NOTHING_READ : SampleSource.END_OF_STREAM;
}
// Return if the input stream will be reopened at the requested position.
if (pendingLoadPosition != -1) {
return SampleSource.NOTHING_READ;
}
// If there was a seek request, try to skip forwards to the requested position.
if (pendingSeekPosition != -1) {
int bytesToSeekPosition =
(int) (pendingSeekPosition - (inputStreamOffset + inputStream.getReadPosition()));
int skippedByteCount = inputStream.skip(bytesToSeekPosition);
if (skippedByteCount == -1) {
throw new IOException("Unexpected end-of-stream while seeking to sample.");
}
bytesToSeekPosition -= skippedByteCount;
inputStream.mark();
if (bytesToSeekPosition == 0) {
pendingSeekPosition = -1;
} else {
maybeThrowLoadError();
return SampleSource.NOTHING_READ;
}
}
// Return if the sample offset hasn't been loaded yet.
inputStream.returnToMark();
long sampleOffset = track.sampleTable.offsets[sampleIndex];
long seekOffsetLong = (sampleOffset - inputStreamOffset) - inputStream.getReadPosition();
Assertions.checkState(seekOffsetLong <= Integer.MAX_VALUE);
int seekOffset = (int) seekOffsetLong;
if (inputStream.skip(seekOffset) != seekOffset) {
maybeThrowLoadError();
return SampleSource.NOTHING_READ;
}
// Return if the sample has been loaded.
int sampleSize = track.sampleTable.sizes[sampleIndex];
if (inputStream.getAvailableByteCount() < sampleSize) {
maybeThrowLoadError();
return SampleSource.NOTHING_READ;
}
if (sampleHolder.data == null || sampleHolder.data.capacity() < sampleSize) {
sampleHolder.replaceBuffer(sampleSize);
}
ByteBuffer data = sampleHolder.data;
if (data == null) {
inputStream.skip(sampleSize);
sampleHolder.size = 0;
} else {
int bytesRead = inputStream.read(data, sampleSize);
Assertions.checkState(bytesRead == sampleSize);
if (MimeTypes.VIDEO_H264.equals(tracks[trackIndex].track.mediaFormat.mimeType)) {
// The mp4 file contains length-prefixed access units, but the decoder wants start code
// delimited content.
Mp4Util.replaceLengthPrefixesWithAvcStartCodes(sampleHolder.data, sampleSize);
}
sampleHolder.size = sampleSize;
}
// Move the input stream mark forwards if the earliest current sample was just read.
if (getTrackIndexOfEarliestCurrentSample() == trackIndex) {
inputStream.mark();
}
// TODO: Read encryption data.
sampleHolder.timeUs = track.sampleTable.timestampsUs[sampleIndex];
sampleHolder.flags = track.sampleTable.flags[sampleIndex];
// Advance to the next sample, checking if this was the last sample.
track.sampleIndex =
sampleIndex + 1 == track.sampleTable.getSampleCount() ? Mp4Util.NO_SAMPLE : sampleIndex + 1;
// Reset the loading error counter if we read past the offset at which the error was thrown.
if (dataSourceStream.getReadPosition() > loadErrorPosition) {
loadErrorCount = 0;
loadErrorPosition = -1;
}
return SampleSource.SAMPLE_READ;
}
@Override
public void release() {
pendingLoadPosition = -1;
loader.release();
if (inputStream != null) {
inputStream.close();
}
}
@Override
public void onLoadError(Loadable loadable, IOException exception) {
lastLoadError = exception;
loadErrorCount++;
if (loadErrorPosition == -1) {
loadErrorPosition = dataSourceStream.getLoadPosition();
}
int delayMs = getRetryDelayMs(loadErrorCount);
Log.w(TAG, "Retry loading (delay " + delayMs + " ms).");
loader.startLoading(dataSourceStream, this, delayMs);
}
@Override
public void onLoadCompleted(Loadable loadable) {
loadCompleted = true;
}
@Override
public void onLoadCanceled(Loadable loadable) {
if (pendingLoadPosition != -1) {
loadFromOffset(pendingLoadPosition);
pendingLoadPosition = -1;
}
}
private void loadFromOffset(long offsetBytes) {
inputStreamOffset = offsetBytes;
rootAtomBytesRead = offsetBytes;
if (loader.isLoading()) {
// Wait for loading to be canceled before proceeding.
pendingLoadPosition = offsetBytes;
loader.cancelLoading();
return;
}
if (inputStream != null) {
inputStream.close();
}
DataSpec dataSpec = new DataSpec(
this.dataSpec.uri, offsetBytes, C.LENGTH_UNBOUNDED, this.dataSpec.key);
dataSourceStream =
new DataSourceStream(dataSource, dataSpec, bufferPool, readAheadAllocationSize);
loader.startLoading(dataSourceStream, this);
// Wrap the input stream with a buffering stream so that it is possible to read from any track.
inputStream =
new BufferedNonBlockingInputStream(dataSourceStream, maximumTrackSampleInterval);
loadCompleted = false;
loadErrorCount = 0;
loadErrorPosition = -1;
}
/**
* Returns the index of the track that contains the earliest current sample, or
* {@link Mp4Util#NO_TRACK} if no track is selected, or {@link Mp4Util#NO_SAMPLE} if no samples
* remain in selected tracks.
*/
private int getTrackIndexOfEarliestCurrentSample() {
int earliestSampleTrackIndex = Mp4Util.NO_TRACK;
long earliestSampleOffset = Long.MAX_VALUE;
for (int trackIndex = 0; trackIndex < tracks.length; trackIndex++) {
Mp4Track track = tracks[trackIndex];
if (!track.selected) {
continue;
}
int sampleIndex = track.sampleIndex;
if (sampleIndex == Mp4Util.NO_SAMPLE) {
if (earliestSampleTrackIndex == Mp4Util.NO_TRACK) {
// A track is selected, but it has no more samples.
earliestSampleTrackIndex = Mp4Util.NO_SAMPLE;
}
continue;
}
long trackSampleOffset = track.sampleTable.offsets[sampleIndex];
if (trackSampleOffset < earliestSampleOffset) {
earliestSampleOffset = trackSampleOffset;
earliestSampleTrackIndex = trackIndex;
}
}
return earliestSampleTrackIndex;
}
private boolean hasSampleInAnySelectedTrack() {
boolean hasSample = false;
for (int trackIndex = 0; trackIndex < tracks.length; trackIndex++) {
if (tracks[trackIndex].selected && tracks[trackIndex].sampleIndex != Mp4Util.NO_SAMPLE) {
hasSample = true;
break;
}
}
return hasSample;
}
/** Reads headers, returning whether the end of the stream was reached. */
private boolean readHeaders() {
int results = 0;
while (!prepared && (results & (RESULT_NEED_MORE_DATA | RESULT_END_OF_STREAM)) == 0) {
switch (parserState) {
case STATE_READING_ATOM_HEADER:
results |= readAtomHeader();
break;
case STATE_READING_ATOM_PAYLOAD:
results |= readAtomPayload();
break;
}
}
return (results & RESULT_END_OF_STREAM) != 0;
}
private int readAtomHeader() {
if (pendingLoadPosition != -1) {
return RESULT_NEED_MORE_DATA;
}
// The size field is 4 bytes long, or 8 bytes long when atomSize == Mp4Util.LONG_ATOM_SIZE.
int remainingBytes;
if (atomSize != Mp4Util.LONG_ATOM_SIZE) {
remainingBytes = Mp4Util.ATOM_HEADER_SIZE - atomBytesRead;
} else {
remainingBytes = Mp4Util.LONG_ATOM_HEADER_SIZE - atomBytesRead;
}
int bytesRead = inputStream.read(atomHeader.data, atomBytesRead, remainingBytes);
if (bytesRead == -1) {
return RESULT_END_OF_STREAM;
}
rootAtomBytesRead += bytesRead;
atomBytesRead += bytesRead;
if (atomBytesRead < Mp4Util.ATOM_HEADER_SIZE
|| (atomSize == Mp4Util.LONG_ATOM_SIZE && atomBytesRead < Mp4Util.LONG_ATOM_HEADER_SIZE)) {
return RESULT_NEED_MORE_DATA;
}
atomHeader.setPosition(0);
atomSize = atomHeader.readUnsignedInt();
atomType = atomHeader.readInt();
if (atomSize == Mp4Util.LONG_ATOM_SIZE) {
// The extended atom size is contained in the next 8 bytes, so try to read it now.
if (atomBytesRead < Mp4Util.LONG_ATOM_HEADER_SIZE) {
return readAtomHeader();
}
atomSize = atomHeader.readLong();
}
Integer atomTypeInteger = atomType; // Avoids boxing atomType twice.
if (CONTAINER_TYPES.contains(atomTypeInteger)) {
if (atomSize == Mp4Util.LONG_ATOM_SIZE) {
containerAtoms.add(new ContainerAtom(
atomType, rootAtomBytesRead + atomSize - Mp4Util.LONG_ATOM_HEADER_SIZE));
} else {
containerAtoms.add(new ContainerAtom(
atomType, rootAtomBytesRead + atomSize - Mp4Util.ATOM_HEADER_SIZE));
}
enterState(STATE_READING_ATOM_HEADER);
} else if (LEAF_ATOM_TYPES.contains(atomTypeInteger)) {
Assertions.checkState(atomSize <= Integer.MAX_VALUE);
atomData = new ParsableByteArray((int) atomSize);
System.arraycopy(atomHeader.data, 0, atomData.data, 0, Mp4Util.ATOM_HEADER_SIZE);
enterState(STATE_READING_ATOM_PAYLOAD);
} else {
atomData = null;
enterState(STATE_READING_ATOM_PAYLOAD);
}
return 0;
}
private int readAtomPayload() {
int bytesRead;
if (atomData != null) {
bytesRead = inputStream.read(atomData.data, atomBytesRead, (int) atomSize - atomBytesRead);
} else {
if (atomSize >= reloadMinimumSeekDistance || atomSize > Integer.MAX_VALUE) {
loadFromOffset(rootAtomBytesRead + atomSize - atomBytesRead);
onContainerAtomRead();
enterState(STATE_READING_ATOM_HEADER);
return 0;
} else {
bytesRead = inputStream.skip((int) atomSize - atomBytesRead);
}
}
if (bytesRead == -1) {
return RESULT_END_OF_STREAM;
}
rootAtomBytesRead += bytesRead;
atomBytesRead += bytesRead;
if (atomBytesRead != atomSize) {
return RESULT_NEED_MORE_DATA;
}
if (atomData != null && !containerAtoms.isEmpty()) {
containerAtoms.peek().add(new Atom.LeafAtom(atomType, atomData));
}
onContainerAtomRead();
enterState(STATE_READING_ATOM_HEADER);
return 0;
}
private void onContainerAtomRead() {
while (!containerAtoms.isEmpty() && containerAtoms.peek().endByteOffset == rootAtomBytesRead) {
Atom.ContainerAtom containerAtom = containerAtoms.pop();
if (containerAtom.type == Atom.TYPE_moov) {
processMoovAtom(containerAtom);
} else if (!containerAtoms.isEmpty()) {
containerAtoms.peek().add(containerAtom);
}
}
}
private void enterState(int state) {
switch (state) {
case STATE_READING_ATOM_HEADER:
atomBytesRead = 0;
atomSize = 0;
break;
}
parserState = state;
inputStream.mark();
}
/** Updates the stored track metadata to reflect the contents of the specified moov atom. */
private void processMoovAtom(Atom.ContainerAtom moov) {
List<Mp4Track> tracks = new ArrayList<Mp4Track>();
long earliestSampleOffset = Long.MAX_VALUE;
for (int i = 0; i < moov.containerChildren.size(); i++) {
Atom.ContainerAtom atom = moov.containerChildren.get(i);
if (atom.type != Atom.TYPE_trak) {
continue;
}
Track track = CommonMp4AtomParsers.parseTrak(atom, moov.getLeafAtomOfType(Atom.TYPE_mvhd));
if (track.type != Track.TYPE_AUDIO && track.type != Track.TYPE_VIDEO) {
continue;
}
Atom.ContainerAtom stblAtom = atom.getContainerAtomOfType(Atom.TYPE_mdia)
.getContainerAtomOfType(Atom.TYPE_minf).getContainerAtomOfType(Atom.TYPE_stbl);
Mp4TrackSampleTable trackSampleTable = CommonMp4AtomParsers.parseStbl(track, stblAtom);
if (trackSampleTable.getSampleCount() == 0) {
continue;
}
tracks.add(new Mp4Track(track, trackSampleTable));
// Keep track of the byte offset of the earliest sample.
long firstSampleOffset = trackSampleTable.offsets[0];
if (firstSampleOffset < earliestSampleOffset) {
earliestSampleOffset = firstSampleOffset;
}
}
this.tracks = tracks.toArray(new Mp4Track[0]);
if (earliestSampleOffset < inputStream.getReadPosition()) {
loadFromOffset(earliestSampleOffset);
}
prepared = true;
}
/** Returns an unmodifiable set of atom types. */
private static Set<Integer> getAtomTypeSet(int... atomTypes) {
Set<Integer> atomTypeSet = new HashSet<Integer>();
for (int atomType : atomTypes) {
atomTypeSet.add(atomType);
}
return Collections.unmodifiableSet(atomTypeSet);
}
private int getRetryDelayMs(int errorCount) {
return Math.min((errorCount - 1) * 1000, 5000);
}
private void maybeThrowLoadError() throws IOException {
if (loadErrorCount > loadRetryCount) {
throw lastLoadError;
}
}
private static final class Mp4Track {
public final Track track;
public final Mp4TrackSampleTable sampleTable;
public boolean selected;
public int sampleIndex;
public Mp4Track(Track track, Mp4TrackSampleTable sampleTable) {
this.track = track;
this.sampleTable = sampleTable;
}
}
}
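
For reference, a hedged sketch of how a caller, such as the DefaultSampleSource used by the demo, might drive this extractor: poll prepare() until it returns true, select the tracks to read, then call readSample(), treating NOTHING_READ as "not buffered yet, try again later". Exception handling and back-off are omitted; the fragment is illustrative only and is not part of this commit.

// Illustrative only; a real caller polls from its own thread and handles IOExceptions.
Mp4SampleExtractor extractor = new Mp4SampleExtractor(dataSource, new DataSpec(uri));
while (!extractor.prepare()) {
  // The moov atom has not been buffered yet; loading continues on the Loader thread.
}
for (int i = 0; i < extractor.getTrackCount(); i++) {
  extractor.selectTrack(i);
}
SampleHolder holder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
int result = extractor.readSample(0, holder);
if (result == SampleSource.SAMPLE_READ) {
  // holder.data, holder.size, holder.timeUs and holder.flags describe the sample.
} else if (result == SampleSource.NOTHING_READ) {
  // The sample is not buffered yet (or a reload is pending); poll again later.
} else if (result == SampleSource.END_OF_STREAM) {
  // No samples remain in any selected track.
}
extractor.release();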

BufferedNonBlockingInputStream.java

@@ -0,0 +1,150 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.upstream;
import com.google.android.exoplayer.util.Assertions;
import java.nio.ByteBuffer;
/**
* Input stream with non-blocking reading/skipping that also stores read/skipped data in a buffer.
* Call {@link #mark} to discard any buffered data before the current reading position. Call
* {@link #returnToMark} to move the current reading position back to the marked position, which is
* initially the start of the input stream.
*/
public final class BufferedNonBlockingInputStream implements NonBlockingInputStream {
private final NonBlockingInputStream inputStream;
private final byte[] bufferedBytes;
private long inputStreamPosition;
private int readPosition;
private int writePosition;
/**
* Wraps the specified {@code nonBlockingInputStream} for buffered reading using a buffer of size
* {@code bufferSize} bytes.
*/
public BufferedNonBlockingInputStream(
NonBlockingInputStream nonBlockingInputStream, int bufferSize) {
inputStream = Assertions.checkNotNull(nonBlockingInputStream);
bufferedBytes = new byte[bufferSize];
}
@Override
public int skip(int length) {
return consumeStream(null, null, 0, length);
}
@Override
public int read(byte[] buffer, int offset, int length) {
return consumeStream(null, buffer, offset, length);
}
@Override
public int read(ByteBuffer buffer, int length) {
return consumeStream(buffer, null, 0, length);
}
@Override
public long getAvailableByteCount() {
// The amount that can be read from the input stream is limited by how much can be buffered.
return (writePosition - readPosition)
+ Math.min(inputStream.getAvailableByteCount(), bufferedBytes.length - writePosition);
}
@Override
public boolean isEndOfStream() {
return writePosition == readPosition && inputStream.isEndOfStream();
}
@Override
public void close() {
inputStream.close();
inputStreamPosition = -1;
}
/** Returns the current position in the stream. */
public long getReadPosition() {
return inputStreamPosition - (writePosition - readPosition);
}
/**
* Moves the mark to be at the current position. Any data before the current position is
* discarded. After calling this method, calling {@link #returnToMark} will move the reading
* position back to the mark position.
*/
public void mark() {
System.arraycopy(bufferedBytes, readPosition, bufferedBytes, 0, writePosition - readPosition);
writePosition -= readPosition;
readPosition = 0;
}
/** Moves the current position back to the mark position. */
public void returnToMark() {
readPosition = 0;
}
/**
* Reads or skips data from the input stream. If {@code byteBuffer} is non-{@code null}, reads
* {@code length} bytes into {@code byteBuffer} (other arguments are ignored). If
* {@code byteArray} is non-{@code null}, reads {@code length} bytes into {@code byteArray} at
* {@code offset} (other arguments are ignored). Otherwise, skips {@code length} bytes.
*
* @param byteBuffer {@link ByteBuffer} to read into, or {@code null} to read into
* {@code byteArray} or skip.
* @param byteArray Byte array to read into, or {@code null} to read into {@code byteBuffer} or
* skip.
* @param offset Offset in {@code byteArray} to write to, if it is non-{@code null}.
* @param length Number of bytes to read or skip.
* @return The number of bytes consumed, or -1 if nothing was consumed and the end of stream was
* reached.
*/
private int consumeStream(ByteBuffer byteBuffer, byte[] byteArray, int offset, int length) {
// If necessary, reduce length so that we do not need to write past the end of the array.
int pendingBytes = writePosition - readPosition;
length = Math.min(length, bufferedBytes.length - writePosition + pendingBytes);
// If reading past the end of buffered data, request more and populate the buffer.
int streamBytesRead = 0;
if (length - pendingBytes > 0) {
streamBytesRead = inputStream.read(bufferedBytes, writePosition, length - pendingBytes);
if (streamBytesRead > 0) {
inputStreamPosition += streamBytesRead;
writePosition += streamBytesRead;
pendingBytes += streamBytesRead;
}
}
// Signal the end of the stream if nothing more will be read.
if (streamBytesRead == -1 && pendingBytes == 0) {
return -1;
}
// Fill the buffer using buffered data if reading, or just skip otherwise.
length = Math.min(pendingBytes, length);
if (byteBuffer != null) {
byteBuffer.put(bufferedBytes, readPosition, length);
} else if (byteArray != null) {
System.arraycopy(bufferedBytes, readPosition, byteArray, offset, length);
}
readPosition += length;
return length;
}
}
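
A hedged, self-contained sketch of the mark/returnToMark contract described in the class Javadoc above. The ByteArrayStream upstream is written for this example only and assumes NonBlockingInputStream declares exactly the six methods overridden above; it is not part of the library.

import com.google.android.exoplayer.upstream.BufferedNonBlockingInputStream;
import com.google.android.exoplayer.upstream.NonBlockingInputStream;
import java.nio.ByteBuffer;

public final class BufferedStreamExample {

  /** Trivial in-memory upstream, for illustration only. */
  private static final class ByteArrayStream implements NonBlockingInputStream {
    private final byte[] data;
    private int position;

    ByteArrayStream(byte[] data) {
      this.data = data;
    }

    @Override
    public int skip(int length) {
      int count = Math.min(length, data.length - position);
      if (count == 0) {
        return isEndOfStream() ? -1 : 0;
      }
      position += count;
      return count;
    }

    @Override
    public int read(byte[] buffer, int offset, int length) {
      int count = Math.min(length, data.length - position);
      if (count == 0) {
        return isEndOfStream() ? -1 : 0;
      }
      System.arraycopy(data, position, buffer, offset, count);
      position += count;
      return count;
    }

    @Override
    public int read(ByteBuffer buffer, int length) {
      int count = Math.min(length, data.length - position);
      if (count == 0) {
        return isEndOfStream() ? -1 : 0;
      }
      buffer.put(data, position, count);
      position += count;
      return count;
    }

    @Override
    public long getAvailableByteCount() {
      return data.length - position;
    }

    @Override
    public boolean isEndOfStream() {
      return position == data.length;
    }

    @Override
    public void close() {
      // Nothing to release.
    }
  }

  public static void main(String[] args) {
    byte[] source = {0, 1, 2, 3, 4, 5, 6, 7};
    BufferedNonBlockingInputStream stream =
        new BufferedNonBlockingInputStream(new ByteArrayStream(source), 8);
    byte[] scratch = new byte[4];
    stream.read(scratch, 0, 4);  // Reads bytes 0-3; they remain buffered after the mark.
    stream.returnToMark();       // Rewinds to the initial mark at offset 0.
    stream.read(scratch, 0, 4);  // Re-reads bytes 0-3 from the buffer, with no upstream read.
    stream.mark();               // Discards everything before offset 4.
    stream.read(scratch, 0, 4);  // Reads bytes 4-7 from the upstream.
    System.out.println("Read position: " + stream.getReadPosition());  // Prints 8.
  }
}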

DataSourceStream.java

@@ -47,6 +47,10 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
private final Allocator allocator;
private final ReadHead readHead;
/** Whether {@link #allocation}'s capacity is fixed. If true, the allocation is not resized. */
private final boolean isAllocationFixedSize;
private final int allocationSize;
private Allocation allocation;
private volatile boolean loadCanceled;
@@ -58,6 +62,9 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
private int writeFragmentRemainingLength;
/**
* Constructs an instance whose allocation grows to contain all of the data specified by the
* {@code dataSpec}.
*
* @param dataSource The source from which the data should be loaded.
* @param dataSpec Defines the data to be loaded. {@code dataSpec.length} must not exceed
* {@link Integer#MAX_VALUE}. If {@code dataSpec.length == C.LENGTH_UNBOUNDED} then
@@ -72,12 +79,48 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
this.allocator = allocator;
resolvedLength = C.LENGTH_UNBOUNDED;
readHead = new ReadHead();
isAllocationFixedSize = false;
allocationSize = 0;
}
/**
* Constructs an instance whose allocation is of a fixed size, which may be smaller than the data
* specified by the {@code dataSpec}.
* <p>
* The allocation size determines how far ahead loading can proceed relative to the current
* reading position.
*
* @param dataSource The source from which the data should be loaded.
* @param dataSpec Defines the data to be loaded.
* @param allocator Used to obtain an {@link Allocation} for holding the data.
* @param allocationSize The minimum size for a fixed-size allocation that will hold the data
* loaded from {@code dataSource}.
*/
public DataSourceStream(
DataSource dataSource, DataSpec dataSpec, Allocator allocator, int allocationSize) {
Assertions.checkState(dataSpec.length <= Integer.MAX_VALUE);
this.dataSource = dataSource;
this.dataSpec = dataSpec;
this.allocator = allocator;
this.allocationSize = allocationSize;
resolvedLength = C.LENGTH_UNBOUNDED;
readHead = new ReadHead();
isAllocationFixedSize = true;
}
/**
* Resets the read position to the start of the data.
*
* @throws UnsupportedOperationException Thrown if the allocation size is fixed.
*/
public void resetReadPosition() {
if (isAllocationFixedSize) {
throw new UnsupportedOperationException(
"The read position cannot be reset when using a fixed allocation");
}
readHead.reset();
}
@@ -176,7 +219,12 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
byte[][] buffers = allocation.getBuffers();
while (bytesRead < bytesToRead) {
if (readHead.fragmentRemaining == 0) {
readHead.fragmentIndex++;
if (readHead.fragmentIndex == buffers.length - 1) {
Assertions.checkState(isAllocationFixedSize);
readHead.fragmentIndex = 0;
} else {
readHead.fragmentIndex++;
}
readHead.fragmentOffset = allocation.getFragmentOffset(readHead.fragmentIndex);
readHead.fragmentRemaining = allocation.getFragmentLength(readHead.fragmentIndex);
}
@@ -194,6 +242,13 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
readHead.fragmentRemaining -= bufferReadLength;
}
if (isAllocationFixedSize) {
synchronized (readHead) {
// Notify load() of the updated position so it can resume.
readHead.notify();
}
}
return bytesRead;
}
@@ -210,6 +265,7 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
}
@Override
@SuppressWarnings("NonAtomicVolatileUpdate")
public void load() throws IOException, InterruptedException {
if (loadCanceled || isLoadFinished()) {
// The load was canceled, or is already complete.
@@ -221,7 +277,7 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
if (loadPosition == 0 && resolvedLength == C.LENGTH_UNBOUNDED) {
loadDataSpec = dataSpec;
long resolvedLength = dataSource.open(loadDataSpec);
if (resolvedLength > Integer.MAX_VALUE) {
if (!isAllocationFixedSize && resolvedLength > Integer.MAX_VALUE) {
throw new DataSourceStreamLoadException(
new UnexpectedLengthException(dataSpec.length, resolvedLength));
}
@@ -235,9 +291,13 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
}
if (allocation == null) {
int initialAllocationSize = resolvedLength != C.LENGTH_UNBOUNDED
? (int) resolvedLength : CHUNKED_ALLOCATION_INCREMENT;
allocation = allocator.allocate(initialAllocationSize);
if (isAllocationFixedSize) {
allocation = allocator.allocate(allocationSize);
} else {
int initialAllocationSize = resolvedLength != C.LENGTH_UNBOUNDED
? (int) resolvedLength : CHUNKED_ALLOCATION_INCREMENT;
allocation = allocator.allocate(initialAllocationSize);
}
}
int allocationCapacity = allocation.capacity();
@@ -253,18 +313,25 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
if (Thread.interrupted()) {
throw new InterruptedException();
}
read = dataSource.read(buffers[writeFragmentIndex], writeFragmentOffset,
writeFragmentRemainingLength);
int bytesToWrite = getBytesToWrite();
read = dataSource.read(buffers[writeFragmentIndex], writeFragmentOffset, bytesToWrite);
if (read > 0) {
loadPosition += read;
writeFragmentOffset += read;
writeFragmentRemainingLength -= read;
if (writeFragmentRemainingLength == 0 && maybeMoreToLoad()) {
writeFragmentIndex++;
if (loadPosition == allocationCapacity) {
allocation.ensureCapacity(allocationCapacity + CHUNKED_ALLOCATION_INCREMENT);
allocationCapacity = allocation.capacity();
buffers = allocation.getBuffers();
if (writeFragmentIndex == buffers.length) {
if (isAllocationFixedSize) {
// Wrap back to the first fragment.
writeFragmentIndex = 0;
} else {
// Grow the allocation.
allocation.ensureCapacity(allocationCapacity + CHUNKED_ALLOCATION_INCREMENT);
allocationCapacity = allocation.capacity();
buffers = allocation.getBuffers();
}
}
writeFragmentOffset = allocation.getFragmentOffset(writeFragmentIndex);
writeFragmentRemainingLength = allocation.getFragmentLength(writeFragmentIndex);
@@ -281,6 +348,25 @@ public final class DataSourceStream implements Loadable, NonBlockingInputStream
}
}
/**
* Returns the number of bytes that can be written to the current fragment, blocking until the
* reader has consumed data if the allocation has a fixed size and is full.
*/
private int getBytesToWrite() throws InterruptedException {
if (!isAllocationFixedSize) {
return writeFragmentRemainingLength;
}
synchronized (readHead) {
while (loadPosition == readHead.position + allocation.capacity()) {
readHead.wait();
}
}
return Math.min(writeFragmentRemainingLength,
allocation.capacity() - (int) (loadPosition - readHead.position));
}
private boolean maybeMoreToLoad() {
return resolvedLength == C.LENGTH_UNBOUNDED || loadPosition < resolvedLength;
}
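
The fixed-size path above is a bounded producer/consumer arrangement: load() wraps its write index back to the first fragment when it reaches the end of the allocation and blocks in getBytesToWrite() while the allocation is full, and read() wakes it by notifying on readHead. The class below is a self-contained sketch of the same back-pressure idea over a plain byte array; it illustrates the pattern only and is not the library's Allocation/fragment machinery.

/** Bounded ring buffer illustrating the wait/notify back-pressure used by the fixed-size path. */
public final class BoundedRingBuffer {

  private final byte[] buffer;
  private long writePosition; // Total bytes written by the producer (cf. loadPosition).
  private long readPosition;  // Total bytes consumed by the reader (cf. readHead.position).

  public BoundedRingBuffer(int capacity) {
    buffer = new byte[capacity];
  }

  /** Producer side: blocks while the buffer is full, like getBytesToWrite(). */
  public synchronized void write(byte value) throws InterruptedException {
    while (writePosition == readPosition + buffer.length) {
      wait();
    }
    buffer[(int) (writePosition++ % buffer.length)] = value;
  }

  /** Consumer side: non-blocking, returns -1 if empty; notifies the producer, like read(). */
  public synchronized int read() {
    if (readPosition == writePosition) {
      return -1;
    }
    int value = buffer[(int) (readPosition++ % buffer.length)] & 0xFF;
    notifyAll();
    return value;
  }
}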

Mp4SampleExtractorTest.java

@@ -0,0 +1,622 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.source;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.SampleHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.mp4.Atom;
import com.google.android.exoplayer.upstream.ByteArrayDataSource;
import com.google.android.exoplayer.upstream.DataSource;
import com.google.android.exoplayer.upstream.DataSpec;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.media.MediaExtractor;
import android.net.Uri;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import junit.framework.TestCase;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
/**
* Tests for {@link Mp4SampleExtractor}.
*/
@TargetApi(16)
public class Mp4SampleExtractorTest extends TestCase {
/** String of hexadecimal bytes containing the video stsd payload from an AVC video. */
private static final byte[] VIDEO_STSD_PAYLOAD = getByteArray(
"00000000000000010000009961766331000000000000000100000000000000000000000000000000050002"
+ "d00048000000480000000000000001000000000000000000000000000000000000000000000000000000"
+ "00000000000018ffff0000002f617663430164001fffe100186764001facb402802dd808800000030080"
+ "00001e078c195001000468ee3cb000000014627472740000e35c0042a61000216cb8");
private static final byte[] VIDEO_HDLR_PAYLOAD = getByteArray("000000000000000076696465");
private static final byte[] VIDEO_MDHD_PAYLOAD =
getByteArray("0000000000000000cf6c48890000001e00001c8a55c40000");
private static final int TIMESCALE = 30;
private static final int VIDEO_WIDTH = 1280;
private static final int VIDEO_HEIGHT = 720;
/** String of hexadecimal bytes containing the video stsd payload for an mp4v track. */
private static final byte[] VIDEO_STSD_MP4V_PAYLOAD = getByteArray(
"0000000000000001000000A36D703476000000000000000100000000000000000000000000000000014000"
+ "B40048000000480000000000000001000000000000000000000000000000000000000000000000000000"
+ "00000000000018FFFF0000004D6573647300000000033F00000004372011001A400004CF280002F11805"
+ "28000001B001000001B58913000001000000012000C48D8800F50A04169463000001B2476F6F676C6506"
+ "0102");
private static final int VIDEO_MP4V_WIDTH = 320;
private static final int VIDEO_MP4V_HEIGHT = 180;
/** String of hexadecimal bytes containing the audio stsd payload from an AAC track. */
private static final byte[] AUDIO_STSD_PAYLOAD = getByteArray(
"0000000000000001000000596d703461000000000000000100000000000000000001001000000000ac4400"
+ "000000003565736473000000000327000000041f401500023e00024bc000023280051012080000000000"
+ "000000000000000000060102");
private static final byte[] AUDIO_HDLR_PAYLOAD = getByteArray("0000000000000000736f756e");
private static final byte[] AUDIO_MDHD_PAYLOAD =
getByteArray("00000000cf6c4889cf6c488a0000ac4400a3e40055c40000");
/** String of hexadecimal bytes containing an mvhd payload from an AVC/AAC video. */
private static final byte[] MVHD_PAYLOAD = getByteArray(
"00000000cf6c4888cf6c48880000025800023ad40001000001000000000000000000000000010000000000"
+ "000000000000000000000100000000000000000000000000004000000000000000000000000000000000"
+ "000000000000000000000000000003");
/** String of hexadecimal bytes containing a tkhd payload with an unknown duration. */
private static final byte[] TKHD_PAYLOAD =
getByteArray("0000000000000000000000000000000000000000FFFFFFFF");
/** Video frame timestamps in time units. */
private static final int[] SAMPLE_TIMESTAMPS = {0, 2, 3, 5, 6, 7};
/** Video frame sizes in bytes, including a very large sample. */
private static final int[] SAMPLE_SIZES = {100, 20, 20, 44, 100, 1 * 1024 * 1024};
/** Indices of key-frames. */
private static final int[] SYNCHRONIZATION_SAMPLE_INDICES = {0, 4, 5};
/** Indices of video frame chunk offsets. */
private static final int[] CHUNK_OFFSETS = {1000, 2000, 3000, 4000};
/** Numbers of video frames in each chunk. */
private static final int[] SAMPLES_IN_CHUNK = {2, 2, 1, 1};
/** The mdat box must be large enough to avoid reading chunk sample data out of bounds. */
private static final int MDAT_SIZE = 10 * 1024 * 1024;
/** Fake HTTP URI that can't be opened. */
private static final Uri FAKE_URI = Uri.parse("http://");
/** Empty byte array. */
private static final byte[] EMPTY = new byte[0];
public void testParsesValidMp4File() throws Exception {
// Given an extractor with an AVC/AAC file
Mp4ExtractorWrapper extractor =
prepareSampleExtractor(getFakeDataSource(true /* includeStss */, false /* mp4vFormat */));
// The MIME type and metadata are set correctly.
assertEquals(MimeTypes.VIDEO_H264, extractor.mediaFormats[0].mimeType);
assertEquals(MimeTypes.AUDIO_AAC, extractor.mediaFormats[1].mimeType);
assertEquals(VIDEO_WIDTH, extractor.selectedTrackMediaFormat.width);
assertEquals(VIDEO_HEIGHT, extractor.selectedTrackMediaFormat.height);
}
public void testParsesValidMp4vFile() throws Exception {
// Given an extractor with an mp4v file
Mp4ExtractorWrapper extractor =
prepareSampleExtractor(getFakeDataSource(true /* includeStss */, true /* mp4vFormat */));
// The MIME type and metadata are set correctly.
assertEquals(MimeTypes.VIDEO_MP4V, extractor.selectedTrackMediaFormat.mimeType);
assertEquals(VIDEO_MP4V_WIDTH, extractor.selectedTrackMediaFormat.width);
assertEquals(VIDEO_MP4V_HEIGHT, extractor.selectedTrackMediaFormat.height);
}
public void testSampleTimestampsMatch() throws Exception {
// Given an extractor
Mp4ExtractorWrapper extractor =
prepareSampleExtractor(getFakeDataSource(true /* includeStss */, false /* mp4vFormat */));
// The timestamps are set correctly.
SampleHolder sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
for (int i = 0; i < SAMPLE_TIMESTAMPS.length; i++) {
extractor.readSample(0, sampleHolder);
assertEquals(getVideoTimestampUs(SAMPLE_TIMESTAMPS[i]), sampleHolder.timeUs);
}
assertEquals(SampleSource.END_OF_STREAM, extractor.readSample(0, sampleHolder));
}
public void testSeekToStart() throws Exception {
// When seeking to the start
int timestampTimeUnits = SAMPLE_TIMESTAMPS[0];
long sampleTimestampUs =
getTimestampUsResultingFromSeek(getVideoTimestampUs(timestampTimeUnits));
// The timestamp is at the start.
assertEquals(getVideoTimestampUs(timestampTimeUnits), sampleTimestampUs);
}
public void testSeekToEnd() throws Exception {
// When seeking to the end
int timestampTimeUnits = SAMPLE_TIMESTAMPS[SAMPLE_TIMESTAMPS.length - 1];
long sampleTimestampUs =
getTimestampUsResultingFromSeek(getVideoTimestampUs(timestampTimeUnits));
// The timestamp is at the end.
assertEquals(getVideoTimestampUs(timestampTimeUnits), sampleTimestampUs);
}
public void testSeekToNearStart() throws Exception {
// When seeking to just after the start
int timestampTimeUnits = SAMPLE_TIMESTAMPS[0];
long sampleTimestampUs =
getTimestampUsResultingFromSeek(getVideoTimestampUs(timestampTimeUnits) + 1);
// The timestamp is at the start.
assertEquals(getVideoTimestampUs(timestampTimeUnits), sampleTimestampUs);
}
public void testSeekToBeforeLastSynchronizationSample() throws Exception {
// When seeking to just before the second synchronization sample
long sampleTimestampUs =
getTimestampUsResultingFromSeek(getVideoTimestampUs(SAMPLE_TIMESTAMPS[4]) - 1);
// The timestamp snaps back to the preceding synchronization sample (the first sample).
assertEquals(getVideoTimestampUs(SAMPLE_TIMESTAMPS[0]), sampleTimestampUs);
}
public void testAllSamplesAreSynchronizationSamplesWhenStssIsMissing() throws Exception {
// Given an extractor without an stss box
Mp4ExtractorWrapper extractor =
prepareSampleExtractor(getFakeDataSource(false /* includeStss */, false /* mp4vFormat */));
// All samples are synchronization samples.
SampleHolder sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
int sampleIndex = 0;
while (true) {
int result = extractor.readSample(0, sampleHolder);
if (result == SampleSource.SAMPLE_READ) {
assertTrue((sampleHolder.flags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0);
sampleHolder.clearData();
sampleIndex++;
} else if (result == SampleSource.END_OF_STREAM) {
break;
}
}
assertTrue(sampleIndex == SAMPLE_SIZES.length);
}
public void testReadAllSamplesSucceeds() throws Exception {
// Given an extractor
Mp4ExtractorWrapper extractor =
prepareSampleExtractor(getFakeDataSource(true /* includeStss */, false /* mp4vFormat */));
// The sample sizes are set correctly.
SampleHolder sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
int sampleIndex = 0;
while (true) {
int result = extractor.readSample(0, sampleHolder);
if (result == SampleSource.SAMPLE_READ) {
assertEquals(SAMPLE_SIZES[sampleIndex], sampleHolder.size);
sampleHolder.clearData();
sampleIndex++;
} else if (result == SampleSource.END_OF_STREAM) {
break;
}
}
assertEquals(SAMPLE_SIZES.length, sampleIndex);
}
/** Returns the sample time read after seeking to {@code timestampUs}. */
private static long getTimestampUsResultingFromSeek(long timestampUs) throws Exception {
Mp4ExtractorWrapper extractor =
prepareSampleExtractor(getFakeDataSource(true /* includeStss */, false /* mp4vFormat */));
extractor.seekTo(timestampUs);
SampleHolder sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
while (true) {
int result = extractor.readSample(0, sampleHolder);
if (result == SampleSource.SAMPLE_READ) {
return sampleHolder.timeUs;
} else if (result == SampleSource.END_OF_STREAM) {
return -1;
}
}
}
private static Mp4ExtractorWrapper prepareSampleExtractor(DataSource dataSource)
throws Exception {
Mp4ExtractorWrapper extractor = new Mp4ExtractorWrapper(dataSource);
extractor.prepare();
return extractor;
}
/** Returns a video timestamp in microseconds corresponding to {@code timeUnits}. */
private static long getVideoTimestampUs(int timeUnits) {
return Util.scaleLargeTimestamp(timeUnits, C.MICROS_PER_SECOND, TIMESCALE);
}
private static byte[] getStco() {
byte[] result = new byte[4 + 4 + 4 * CHUNK_OFFSETS.length];
ByteBuffer buffer = ByteBuffer.wrap(result);
buffer.putInt(0); // Version (skipped)
buffer.putInt(CHUNK_OFFSETS.length);
for (int chunkOffset : CHUNK_OFFSETS) {
buffer.putInt(chunkOffset);
}
return result;
}
private static byte[] getStsc() {
int samplesPerChunk = -1;
List<Integer> samplesInChunkChangeIndices = new ArrayList<Integer>();
for (int i = 0; i < SAMPLES_IN_CHUNK.length; i++) {
if (SAMPLES_IN_CHUNK[i] != samplesPerChunk) {
samplesInChunkChangeIndices.add(i);
samplesPerChunk = SAMPLES_IN_CHUNK[i];
}
}
byte[] result = new byte[4 + 4 + 3 * 4 * samplesInChunkChangeIndices.size()];
ByteBuffer buffer = ByteBuffer.wrap(result);
buffer.putInt(0); // Version (skipped)
buffer.putInt(samplesInChunkChangeIndices.size());
for (int index : samplesInChunkChangeIndices) {
buffer.putInt(index + 1);
buffer.putInt(SAMPLES_IN_CHUNK[index]);
buffer.putInt(0); // Sample description index (skipped)
}
return result;
}
private static byte[] getStsz() {
byte[] result = new byte[4 + 4 + 4 + 4 * SAMPLE_SIZES.length];
ByteBuffer buffer = ByteBuffer.wrap(result);
buffer.putInt(0); // Version (skipped)
buffer.putInt(0); // No fixed sample size.
buffer.putInt(SAMPLE_SIZES.length);
for (int size : SAMPLE_SIZES) {
buffer.putInt(size);
}
return result;
}
private static byte[] getStss() {
byte[] result = new byte[4 + 4 + 4 * SYNCHRONIZATION_SAMPLE_INDICES.length];
ByteBuffer buffer = ByteBuffer.wrap(result);
buffer.putInt(0); // Version (skipped)
buffer.putInt(SYNCHRONIZATION_SAMPLE_INDICES.length);
for (int synchronizationSampleIndex : SYNCHRONIZATION_SAMPLE_INDICES) {
buffer.putInt(synchronizationSampleIndex + 1);
}
return result;
}
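// For reference, a worked example of what getStts() below produces for SAMPLE_TIMESTAMPS
// {0, 2, 3, 5, 6, 7}: the deltas are 2, 1, 2, 1, 1, so four entries are written:
// (count=1, delta=2), (1, 1), (1, 2) and (3, 1). The final entry also assigns the last
// sample a duration of 1, so the entry counts sum to the six samples in the table.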
private static byte[] getStts() {
int sampleTimestampDeltaChanges = 0;
int currentSampleTimestampDelta = -1;
for (int i = 1; i < SAMPLE_TIMESTAMPS.length; i++) {
int timestampDelta = SAMPLE_TIMESTAMPS[i] - SAMPLE_TIMESTAMPS[i - 1];
if (timestampDelta != currentSampleTimestampDelta) {
sampleTimestampDeltaChanges++;
currentSampleTimestampDelta = timestampDelta;
}
}
byte[] result = new byte[4 + 4 + 2 * 4 * sampleTimestampDeltaChanges];
ByteBuffer buffer = ByteBuffer.wrap(result);
buffer.putInt(0); // Version (skipped);
buffer.putInt(sampleTimestampDeltaChanges);
int lastTimestampDeltaChangeIndex = 1;
currentSampleTimestampDelta = SAMPLE_TIMESTAMPS[1] - SAMPLE_TIMESTAMPS[0];
for (int i = 2; i < SAMPLE_TIMESTAMPS.length; i++) {
int timestampDelta = SAMPLE_TIMESTAMPS[i] - SAMPLE_TIMESTAMPS[i - 1];
if (timestampDelta != currentSampleTimestampDelta) {
buffer.putInt(i - lastTimestampDeltaChangeIndex);
lastTimestampDeltaChangeIndex = i;
buffer.putInt(currentSampleTimestampDelta);
currentSampleTimestampDelta = timestampDelta;
}
}
// The last sample also has a duration, so the number of entries is the number of samples.
buffer.putInt(SAMPLE_TIMESTAMPS.length - lastTimestampDeltaChangeIndex + 1);
buffer.putInt(currentSampleTimestampDelta);
return result;
}
private static byte[] getMdat() {
// TODO: Put NAL length tags in at each sample position so the sample lengths don't have to
// be multiples of four.
return new byte[MDAT_SIZE];
}
private static final DataSource getFakeDataSource(boolean includeStss, boolean mp4vFormat) {
return new ByteArrayDataSource(includeStss
? getTestMp4File(mp4vFormat) : getTestMp4FileWithoutSynchronizationData(mp4vFormat));
}
/** Gets a valid MP4 file with audio/video tracks and synchronization data. */
private static byte[] getTestMp4File(boolean mp4vFormat) {
return Mp4Atom.serialize(
atom(Atom.TYPE_ftyp, EMPTY),
atom(Atom.TYPE_moov,
atom(Atom.TYPE_mvhd, MVHD_PAYLOAD),
atom(Atom.TYPE_trak,
atom(Atom.TYPE_tkhd, TKHD_PAYLOAD),
atom(Atom.TYPE_mdia,
atom(Atom.TYPE_mdhd, VIDEO_MDHD_PAYLOAD),
atom(Atom.TYPE_hdlr, VIDEO_HDLR_PAYLOAD),
atom(Atom.TYPE_minf,
atom(Atom.TYPE_vmhd, EMPTY),
atom(Atom.TYPE_stbl,
atom(Atom.TYPE_stsd,
mp4vFormat ? VIDEO_STSD_MP4V_PAYLOAD : VIDEO_STSD_PAYLOAD),
atom(Atom.TYPE_stts, getStts()),
atom(Atom.TYPE_stss, getStss()),
atom(Atom.TYPE_stsc, getStsc()),
atom(Atom.TYPE_stsz, getStsz()),
atom(Atom.TYPE_stco, getStco()))))),
atom(Atom.TYPE_trak,
atom(Atom.TYPE_tkhd, TKHD_PAYLOAD),
atom(Atom.TYPE_mdia,
atom(Atom.TYPE_mdhd, AUDIO_MDHD_PAYLOAD),
atom(Atom.TYPE_hdlr, AUDIO_HDLR_PAYLOAD),
atom(Atom.TYPE_minf,
atom(Atom.TYPE_vmhd, EMPTY),
atom(Atom.TYPE_stbl,
atom(Atom.TYPE_stsd, AUDIO_STSD_PAYLOAD),
atom(Atom.TYPE_stts, getStts()),
atom(Atom.TYPE_stss, getStss()),
atom(Atom.TYPE_stsc, getStsc()),
atom(Atom.TYPE_stsz, getStsz()),
atom(Atom.TYPE_stco, getStco())))))),
atom(Atom.TYPE_mdat, getMdat()));
}
/** Gets a valid MP4 file with audio/video tracks and without a synchronization table. */
private static byte[] getTestMp4FileWithoutSynchronizationData(boolean mp4vFormat) {
return Mp4Atom.serialize(
atom(Atom.TYPE_ftyp, EMPTY),
atom(Atom.TYPE_moov,
atom(Atom.TYPE_mvhd, MVHD_PAYLOAD),
atom(Atom.TYPE_trak,
atom(Atom.TYPE_tkhd, TKHD_PAYLOAD),
atom(Atom.TYPE_mdia,
atom(Atom.TYPE_mdhd, VIDEO_MDHD_PAYLOAD),
atom(Atom.TYPE_hdlr, VIDEO_HDLR_PAYLOAD),
atom(Atom.TYPE_minf,
atom(Atom.TYPE_vmhd, EMPTY),
atom(Atom.TYPE_stbl,
atom(Atom.TYPE_stsd,
mp4vFormat ? VIDEO_STSD_MP4V_PAYLOAD : VIDEO_STSD_PAYLOAD),
atom(Atom.TYPE_stts, getStts()),
atom(Atom.TYPE_stsc, getStsc()),
atom(Atom.TYPE_stsz, getStsz()),
atom(Atom.TYPE_stco, getStco()))))),
atom(Atom.TYPE_trak,
atom(Atom.TYPE_tkhd, TKHD_PAYLOAD),
atom(Atom.TYPE_mdia,
atom(Atom.TYPE_mdhd, AUDIO_MDHD_PAYLOAD),
atom(Atom.TYPE_hdlr, AUDIO_HDLR_PAYLOAD),
atom(Atom.TYPE_minf,
atom(Atom.TYPE_vmhd, EMPTY),
atom(Atom.TYPE_stbl,
atom(Atom.TYPE_stsd, AUDIO_STSD_PAYLOAD),
atom(Atom.TYPE_stts, getStts()),
atom(Atom.TYPE_stsc, getStsc()),
atom(Atom.TYPE_stsz, getStsz()),
atom(Atom.TYPE_stco, getStco())))))),
atom(Atom.TYPE_mdat, getMdat()));
}
private static Mp4Atom atom(int type, Mp4Atom... containedMp4Atoms) {
return new Mp4Atom(type, containedMp4Atoms);
}
private static Mp4Atom atom(int type, byte[] payload) {
return new Mp4Atom(type, payload);
}
private static byte[] getByteArray(String hexBytes) {
byte[] result = new byte[hexBytes.length() / 2];
for (int i = 0; i < result.length; i++) {
result[i] = (byte) ((Character.digit(hexBytes.charAt(i * 2), 16) << 4)
+ Character.digit(hexBytes.charAt(i * 2 + 1), 16));
}
return result;
}
/** MP4 atom that can be serialized as a byte array. */
private static final class Mp4Atom {
public static byte[] serialize(Mp4Atom... atoms) {
int size = 0;
for (Mp4Atom atom : atoms) {
size += atom.getSize();
}
ByteBuffer buffer = ByteBuffer.allocate(size);
for (Mp4Atom atom : atoms) {
atom.getData(buffer);
}
return buffer.array();
}
private static final int HEADER_SIZE = 8;
private final int type;
private final Mp4Atom[] containedMp4Atoms;
private final byte[] payload;
private Mp4Atom(int type, Mp4Atom... containedMp4Atoms) {
this.type = type;
this.containedMp4Atoms = containedMp4Atoms;
payload = null;
}
private Mp4Atom(int type, byte[] payload) {
this.type = type;
this.payload = payload;
containedMp4Atoms = null;
}
private int getSize() {
int size = HEADER_SIZE;
if (payload != null) {
size += payload.length;
} else {
for (Mp4Atom atom : containedMp4Atoms) {
size += atom.getSize();
}
}
return size;
}
private void getData(ByteBuffer byteBuffer) {
byteBuffer.putInt(getSize());
byteBuffer.putInt(type);
if (payload != null) {
byteBuffer.put(payload);
} else {
for (Mp4Atom atom : containedMp4Atoms) {
atom.getData(byteBuffer);
}
}
}
}
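  // Serialization layout produced by Mp4Atom (illustrative): each atom is written as an 8-byte
  // header ([32-bit size][32-bit type]) followed by either its payload or its serialized
  // children. For example, serializing atom(Atom.TYPE_moov, atom(Atom.TYPE_mvhd, MVHD_PAYLOAD))
  // yields 8 + (8 + MVHD_PAYLOAD.length) bytes, with the moov size field covering the nested
  // mvhd atom.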
  /**
   * Wraps an {@link Mp4SampleExtractor} on a separate thread with its own {@link Looper}, so that
   * the extractor can use a handler for loading, and exposes blocking {@link #prepare} and
   * {@link #readSample} operations plus an asynchronous {@link #seekTo}.
   */
private static final class Mp4ExtractorWrapper extends Thread {
private static final int MSG_PREPARE = 0;
private static final int MSG_SEEK_TO = 1;
private static final int MSG_READ_SAMPLE = 2;
private final DataSource dataSource;
// Written by the handler's thread and read by the main thread.
public volatile MediaFormat[] mediaFormats;
public volatile MediaFormat selectedTrackMediaFormat;
private volatile Handler handler;
private volatile int readSampleResult;
private volatile Exception exception;
private volatile CountDownLatch pendingOperationLatch;
public Mp4ExtractorWrapper(DataSource dataSource) {
super("Mp4SampleExtractorTest");
this.dataSource = Assertions.checkNotNull(dataSource);
pendingOperationLatch = new CountDownLatch(1);
start();
}
public void prepare() throws Exception {
// Block until the handler has been created.
pendingOperationLatch.await();
// Block until the extractor has been prepared.
pendingOperationLatch = new CountDownLatch(1);
handler.sendEmptyMessage(MSG_PREPARE);
pendingOperationLatch.await();
if (exception != null) {
throw exception;
}
}
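    /** Posts an asynchronous seek to {@code timestampUs} on the extractor thread; does not block. */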
public void seekTo(long timestampUs) {
handler.obtainMessage(MSG_SEEK_TO, timestampUs).sendToTarget();
}
public int readSample(int trackIndex, SampleHolder sampleHolder) throws Exception {
// Block until the extractor has completed readSample.
pendingOperationLatch = new CountDownLatch(1);
handler.obtainMessage(MSG_READ_SAMPLE, trackIndex, 0, sampleHolder).sendToTarget();
pendingOperationLatch.await();
if (exception != null) {
throw exception;
}
return readSampleResult;
}
@SuppressLint("HandlerLeak")
@Override
public void run() {
final Mp4SampleExtractor mp4SampleExtractor =
new Mp4SampleExtractor(dataSource, new DataSpec(FAKE_URI));
Looper.prepare();
handler = new Handler() {
@Override
public void handleMessage(Message message) {
try {
switch (message.what) {
case MSG_PREPARE:
if (!mp4SampleExtractor.prepare()) {
sendEmptyMessage(MSG_PREPARE);
} else {
// Select the video track and get its metadata.
mediaFormats = new MediaFormat[mp4SampleExtractor.getTrackCount()];
for (int track = 0; track < mp4SampleExtractor.getTrackCount(); track++) {
MediaFormat mediaFormat = mp4SampleExtractor.getMediaFormat(track);
mediaFormats[track] = mediaFormat;
if (MimeTypes.isVideo(mediaFormat.mimeType)) {
mp4SampleExtractor.selectTrack(track);
selectedTrackMediaFormat = mediaFormat;
}
}
pendingOperationLatch.countDown();
}
break;
case MSG_SEEK_TO:
long timestampUs = (long) message.obj;
mp4SampleExtractor.seekTo(timestampUs);
break;
case MSG_READ_SAMPLE:
int trackIndex = message.arg1;
SampleHolder sampleHolder = (SampleHolder) message.obj;
sampleHolder.clearData();
readSampleResult = mp4SampleExtractor.readSample(trackIndex, sampleHolder);
if (readSampleResult == SampleSource.NOTHING_READ) {
Message.obtain(message).sendToTarget();
return;
}
pendingOperationLatch.countDown();
break;
}
} catch (Exception e) {
exception = e;
pendingOperationLatch.countDown();
}
}
};
// Unblock waiting for the handler.
pendingOperationLatch.countDown();
Looper.loop();
}
}
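  // Illustrative usage sketch (comments only, not executed by any test): a caller constructs the
  // wrapper around a DataSource, prepares it, then issues blocking reads and asynchronous seeks.
  // videoTrackIndex and sampleHolder are placeholders to be supplied by the calling test.
  //
  //   Mp4ExtractorWrapper extractor = new Mp4ExtractorWrapper(getFakeDataSource(true, false));
  //   extractor.prepare();
  //   extractor.seekTo(0L);
  //   int result = extractor.readSample(videoTrackIndex, sampleHolder); // Blocks until read.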
}

View File

@ -0,0 +1,149 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.upstream;

import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import com.google.android.exoplayer.SampleSource;

import junit.framework.TestCase;

import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import java.util.Arrays;

/**
* Tests for {@link BufferedNonBlockingInputStream}.
*/
public class BufferedNonBlockingInputStreamTest extends TestCase {
private static final int BUFFER_SIZE_BYTES = 16;
@Mock private NonBlockingInputStream mockInputStream;
private BufferedNonBlockingInputStream bufferedInputStream;
@Override
public void setUp() {
MockitoAnnotations.initMocks(this);
bufferedInputStream = new BufferedNonBlockingInputStream(mockInputStream, BUFFER_SIZE_BYTES);
}
public void testSkipClipsCountToBufferSizeWhenMarkSet() {
// When marking and skipping more than the buffer size
bufferedInputStream.mark();
bufferedInputStream.skip(BUFFER_SIZE_BYTES + 1);
// Then BUFFER_SIZE_BYTES are read.
verify(mockInputStream).read((byte[]) any(), eq(0), eq(BUFFER_SIZE_BYTES));
}
public void testSkipResetSkipUsesBufferedData() {
// Given a buffered input stream that has already read BUFFER_SIZE_BYTES
stubInputStreamForReadingBytes();
bufferedInputStream.mark();
bufferedInputStream.skip(BUFFER_SIZE_BYTES);
verify(mockInputStream).read((byte[]) any(), eq(0), eq(BUFFER_SIZE_BYTES));
// When resetting and reading the same amount, no extra data are read.
bufferedInputStream.returnToMark();
bufferedInputStream.skip(BUFFER_SIZE_BYTES);
verify(mockInputStream).read((byte[]) any(), eq(0), eq(BUFFER_SIZE_BYTES));
}
public void testReturnsEndOfStreamAfterBufferedData() {
// Given a buffered input stream that has read 1 byte (to end-of-stream) and has been reset
stubInputStreamForReadingBytes();
bufferedInputStream.mark();
bufferedInputStream.skip(1);
stubInputStreamForReadingEndOfStream();
bufferedInputStream.returnToMark();
// When skipping, first 1 byte is returned, then end-of-stream.
assertEquals(1, bufferedInputStream.skip(1));
assertEquals(SampleSource.END_OF_STREAM, bufferedInputStream.skip(1));
}
public void testReadAtOffset() {
    // Given a mock input stream that provides non-zero data
stubInputStreamForReadingBytes();
// When reading a byte at offset 1
byte[] bytes = new byte[2];
bufferedInputStream.mark();
bufferedInputStream.read(bytes, 1, 1);
// Then only the second byte is set.
assertTrue(Arrays.equals(new byte[] {(byte) 0, (byte) 0xFF}, bytes));
}
public void testSkipAfterMark() {
// Given a mock input stream that provides non-zero data, with three bytes read
stubInputStreamForReadingBytes();
bufferedInputStream.skip(1);
bufferedInputStream.mark();
bufferedInputStream.skip(2);
bufferedInputStream.returnToMark();
// Then it is possible to skip one byte after the mark and read two bytes.
assertEquals(1, bufferedInputStream.skip(1));
assertEquals(2, bufferedInputStream.read(new byte[2], 0, 2));
verify(mockInputStream).read((byte[]) any(), eq(0), eq(1));
verify(mockInputStream).read((byte[]) any(), eq(0), eq(2));
verify(mockInputStream).read((byte[]) any(), eq(2), eq(1));
}
/** Stubs the input stream to read 0xFF for all requests. */
private void stubInputStreamForReadingBytes() {
when(mockInputStream.read((byte[]) any(), anyInt(), anyInt())).thenAnswer(
new Answer<Integer>() {
@Override
public Integer answer(InvocationOnMock invocation) throws Throwable {
byte[] bytes = (byte[]) invocation.getArguments()[0];
int offset = (int) invocation.getArguments()[1];
int length = (int) invocation.getArguments()[2];
for (int i = 0; i < length; i++) {
bytes[i + offset] = (byte) 0xFF;
}
return length;
}
});
when(mockInputStream.skip(anyInt())).thenAnswer(new Answer<Integer>() {
@Override
public Integer answer(InvocationOnMock invocation) throws Throwable {
return (int) invocation.getArguments()[0];
}
});
}
/** Stubs the input stream to read end-of-stream for all requests. */
private void stubInputStreamForReadingEndOfStream() {
when(mockInputStream.read((byte[]) any(), anyInt(), anyInt()))
.thenReturn(SampleSource.END_OF_STREAM);
}
}