Make AudioTrack.handleBuffer more sane.

Now uses buffer position + limit in both new and legacy modes.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=120713837
This commit is contained in:
olly 2016-04-25 09:25:21 -07:00 committed by Oliver Woodman
parent d1eb43ad62
commit 5cbf75b619
5 changed files with 68 additions and 58 deletions

View File

@@ -328,9 +328,9 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
@Override @Override
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec, protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip) ByteBuffer buffer, int bufferIndex, int bufferFlags, long bufferPresentationTimeUs,
throws ExoPlaybackException { boolean shouldSkip) throws ExoPlaybackException {
if (passthroughEnabled && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { if (passthroughEnabled && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// Discard output buffers from the passthrough (raw) decoder containing codec specific data. // Discard output buffers from the passthrough (raw) decoder containing codec specific data.
codec.releaseOutputBuffer(bufferIndex, false); codec.releaseOutputBuffer(bufferIndex, false);
return true; return true;
@@ -374,8 +374,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
int handleBufferResult; int handleBufferResult;
try { try {
handleBufferResult = audioTrack.handleBuffer( handleBufferResult = audioTrack.handleBuffer(buffer, bufferPresentationTimeUs);
buffer, bufferInfo.offset, bufferInfo.size, bufferInfo.presentationTimeUs);
lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime(); lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
} catch (AudioTrack.WriteException e) { } catch (AudioTrack.WriteException e) {
notifyAudioTrackWriteError(e); notifyAudioTrackWriteError(e);

View File

@@ -798,32 +798,34 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
if (outputIndex < 0) { if (outputIndex < 0) {
outputIndex = codec.dequeueOutputBuffer(outputBufferInfo, getDequeueOutputBufferTimeoutUs()); outputIndex = codec.dequeueOutputBuffer(outputBufferInfo, getDequeueOutputBufferTimeoutUs());
} if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
processOutputFormat();
if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
processOutputFormat();
return true;
} else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = codec.getOutputBuffers();
codecCounters.outputBuffersChangedCount++;
return true;
} else if (outputIndex < 0) {
if (codecNeedsEosPropagationWorkaround && (inputStreamEnded
|| codecReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM)) {
processEndOfStream();
return true; return true;
} else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = codec.getOutputBuffers();
codecCounters.outputBuffersChangedCount++;
return true;
} else if (outputIndex < 0) {
if (codecNeedsEosPropagationWorkaround && (inputStreamEnded
|| codecReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM)) {
processEndOfStream();
return true;
}
return false;
} else if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
processEndOfStream();
return false;
} else {
ByteBuffer outputBuffer = outputBuffers[outputIndex];
outputBuffer.position(outputBufferInfo.offset);
outputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size);
} }
return false;
}
if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
processEndOfStream();
return false;
} }
int decodeOnlyIndex = getDecodeOnlyIndex(outputBufferInfo.presentationTimeUs); int decodeOnlyIndex = getDecodeOnlyIndex(outputBufferInfo.presentationTimeUs);
if (processOutputBuffer(positionUs, elapsedRealtimeUs, codec, outputBuffers[outputIndex], if (processOutputBuffer(positionUs, elapsedRealtimeUs, codec, outputBuffers[outputIndex],
outputBufferInfo, outputIndex, decodeOnlyIndex != -1)) { outputIndex, outputBufferInfo.flags, outputBufferInfo.presentationTimeUs,
decodeOnlyIndex != -1)) {
onProcessedOutputBuffer(outputBufferInfo.presentationTimeUs); onProcessedOutputBuffer(outputBufferInfo.presentationTimeUs);
if (decodeOnlyIndex != -1) { if (decodeOnlyIndex != -1) {
decodeOnlyPresentationTimestamps.remove(decodeOnlyIndex); decodeOnlyPresentationTimestamps.remove(decodeOnlyIndex);
@@ -855,8 +857,8 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
* @throws ExoPlaybackException If an error occurs processing the output buffer. * @throws ExoPlaybackException If an error occurs processing the output buffer.
*/ */
protected abstract boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, protected abstract boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs,
MediaCodec codec, ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex, MediaCodec codec, ByteBuffer buffer, int bufferIndex, int bufferFlags,
boolean shouldSkip) throws ExoPlaybackException; long bufferPresentationTimeUs, boolean shouldSkip) throws ExoPlaybackException;
/** /**
* Processes an end of stream signal. * Processes an end of stream signal.

View File

@@ -429,7 +429,8 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
@Override @Override
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec, protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip) { ByteBuffer buffer, int bufferIndex, int bufferFlags, long bufferPresentationTimeUs,
boolean shouldSkip) {
if (shouldSkip) { if (shouldSkip) {
skipOutputBuffer(codec, bufferIndex); skipOutputBuffer(codec, bufferIndex);
consecutiveDroppedFrameCount = 0; consecutiveDroppedFrameCount = 0;
@@ -452,7 +453,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
// Compute how many microseconds it is until the buffer's presentation time. // Compute how many microseconds it is until the buffer's presentation time.
long elapsedSinceStartOfLoopUs = (SystemClock.elapsedRealtime() * 1000) - elapsedRealtimeUs; long elapsedSinceStartOfLoopUs = (SystemClock.elapsedRealtime() * 1000) - elapsedRealtimeUs;
long earlyUs = bufferInfo.presentationTimeUs - positionUs - elapsedSinceStartOfLoopUs; long earlyUs = bufferPresentationTimeUs - positionUs - elapsedSinceStartOfLoopUs;
// Compute the buffer's desired release time in nanoseconds. // Compute the buffer's desired release time in nanoseconds.
long systemTimeNs = System.nanoTime(); long systemTimeNs = System.nanoTime();
@@ -460,7 +461,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
// Apply a timestamp adjustment, if there is one. // Apply a timestamp adjustment, if there is one.
long adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime( long adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime(
bufferInfo.presentationTimeUs, unadjustedFrameReleaseTimeNs); bufferPresentationTimeUs, unadjustedFrameReleaseTimeNs);
earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000; earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
if (earlyUs < -30000) { if (earlyUs < -30000) {

View File

@@ -42,7 +42,7 @@ import java.nio.ByteBuffer;
* Before starting playback, specify the input audio format by calling one of the {@link #configure} * Before starting playback, specify the input audio format by calling one of the {@link #configure}
* methods and {@link #initialize} the instance, optionally specifying an audio session. * methods and {@link #initialize} the instance, optionally specifying an audio session.
* <p> * <p>
* Call {@link #handleBuffer(ByteBuffer, int, int, long)} to write data to play back, and * Call {@link #handleBuffer(ByteBuffer, long)} to write data to play back, and
* {@link #handleDiscontinuity()} when a buffer is skipped. Call {@link #play()} to start playing * {@link #handleDiscontinuity()} when a buffer is skipped. Call {@link #play()} to start playing
* back written data. * back written data.
* <p> * <p>
@@ -226,7 +226,7 @@ public final class AudioTrack {
private byte[] temporaryBuffer; private byte[] temporaryBuffer;
private int temporaryBufferOffset; private int temporaryBufferOffset;
private int bufferBytesRemaining; private ByteBuffer currentBuffer;
/** /**
* Creates an audio track with default audio capabilities (no encoded audio passthrough support). * Creates an audio track with default audio capabilities (no encoded audio passthrough support).
@@ -533,23 +533,33 @@ public final class AudioTrack {
} }
/** /**
* Attempts to write {@code size} bytes from {@code buffer} at {@code offset} to the audio track. * Attempts to write data from a {@link ByteBuffer} to the audio track, starting from its current
* Returns a bit field containing {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released * position and ending at its limit (exclusive). The position of the {@link ByteBuffer} is
* (due to having been written), and {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was * advanced by the number of bytes that were successfully written.
* discontinuous with previously written data. * <p>
* Returns a bit field containing {@link #RESULT_BUFFER_CONSUMED} if the data was written in full,
* and {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was discontinuous with previously
* written data.
* <p>
* If the data was not written in full then the same {@link ByteBuffer} must be provided to
* subsequent calls until it has been fully consumed, except in the case of an interleaving call
* to {@link #configure(MediaFormat, boolean)} or {@link #reset}.
* *
* @param buffer The buffer containing audio data to play back. * @param buffer The buffer containing audio data to play back.
* @param offset The offset in the buffer from which to consume data.
* @param size The number of bytes to consume from {@code buffer}.
* @param presentationTimeUs Presentation timestamp of the next buffer in microseconds. * @param presentationTimeUs Presentation timestamp of the next buffer in microseconds.
* @return A bit field with {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released, and * @return A bit field with {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released, and
* {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was not contiguous with previously * {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was not contiguous with previously
* written data. * written data.
* @throws WriteException If an error occurs writing the audio data. * @throws WriteException If an error occurs writing the audio data.
*/ */
public int handleBuffer(ByteBuffer buffer, int offset, int size, long presentationTimeUs) public int handleBuffer(ByteBuffer buffer, long presentationTimeUs) throws WriteException {
throws WriteException { boolean isNewBuffer = currentBuffer == null;
if (size == 0) { Assertions.checkState(isNewBuffer || currentBuffer == buffer);
currentBuffer = buffer;
int bytesRemaining = buffer.remaining();
if (bytesRemaining == 0) {
currentBuffer = null;
return RESULT_BUFFER_CONSUMED; return RESULT_BUFFER_CONSUMED;
} }
@@ -570,11 +580,8 @@ public final class AudioTrack {
} }
int result = 0; int result = 0;
if (bufferBytesRemaining == 0) { if (isNewBuffer) {
// The previous buffer (if there was one) was fully written to the audio track. We're now // We're seeing this buffer for the first time.
// seeing a new buffer for the first time.
bufferBytesRemaining = size;
buffer.position(offset);
if (passthrough && framesPerEncodedSample == 0) { if (passthrough && framesPerEncodedSample == 0) {
// If this is the first encoded sample, calculate the sample size in frames. // If this is the first encoded sample, calculate the sample size in frames.
framesPerEncodedSample = getFramesPerEncodedSample(encoding, buffer); framesPerEncodedSample = getFramesPerEncodedSample(encoding, buffer);
@@ -602,10 +609,12 @@ public final class AudioTrack {
} }
if (Util.SDK_INT < 21) { if (Util.SDK_INT < 21) {
// Copy {@code buffer} into {@code temporaryBuffer}. // Copy {@code buffer} into {@code temporaryBuffer}.
if (temporaryBuffer == null || temporaryBuffer.length < size) { if (temporaryBuffer == null || temporaryBuffer.length < bytesRemaining) {
temporaryBuffer = new byte[size]; temporaryBuffer = new byte[bytesRemaining];
} }
buffer.get(temporaryBuffer, 0, size); int originalPosition = buffer.position();
buffer.get(temporaryBuffer, 0, bytesRemaining);
buffer.position(originalPosition);
temporaryBufferOffset = 0; temporaryBufferOffset = 0;
} }
} }
@@ -617,36 +626,36 @@ public final class AudioTrack {
(int) (submittedPcmBytes - (audioTrackUtil.getPlaybackHeadPosition() * pcmFrameSize)); (int) (submittedPcmBytes - (audioTrackUtil.getPlaybackHeadPosition() * pcmFrameSize));
int bytesToWrite = bufferSize - bytesPending; int bytesToWrite = bufferSize - bytesPending;
if (bytesToWrite > 0) { if (bytesToWrite > 0) {
bytesToWrite = Math.min(bufferBytesRemaining, bytesToWrite); bytesToWrite = Math.min(bytesRemaining, bytesToWrite);
bytesWritten = audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite); bytesWritten = audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
if (bytesWritten >= 0) { if (bytesWritten >= 0) {
temporaryBufferOffset += bytesWritten; temporaryBufferOffset += bytesWritten;
} }
buffer.position(buffer.position() + bytesWritten);
} }
} else { } else {
bytesWritten = writeNonBlockingV21(audioTrack, buffer, bufferBytesRemaining); bytesWritten = writeNonBlockingV21(audioTrack, buffer, bytesRemaining);
} }
if (bytesWritten < 0) { if (bytesWritten < 0) {
throw new WriteException(bytesWritten); throw new WriteException(bytesWritten);
} }
bufferBytesRemaining -= bytesWritten;
if (!passthrough) { if (!passthrough) {
submittedPcmBytes += bytesWritten; submittedPcmBytes += bytesWritten;
} }
if (bufferBytesRemaining == 0) { if (bytesWritten == bytesRemaining) {
if (passthrough) { if (passthrough) {
submittedEncodedFrames += framesPerEncodedSample; submittedEncodedFrames += framesPerEncodedSample;
} }
currentBuffer = null;
result |= RESULT_BUFFER_CONSUMED; result |= RESULT_BUFFER_CONSUMED;
} }
return result; return result;
} }
/** /**
* Ensures that the last data passed to {@link #handleBuffer(ByteBuffer, int, int, long)} is * Ensures that the last data passed to {@link #handleBuffer(ByteBuffer, long)} is played in full.
* played out in full.
*/ */
public void handleEndOfStream() { public void handleEndOfStream() {
if (isInitialized()) { if (isInitialized()) {
@@ -730,7 +739,7 @@ public final class AudioTrack {
submittedPcmBytes = 0; submittedPcmBytes = 0;
submittedEncodedFrames = 0; submittedEncodedFrames = 0;
framesPerEncodedSample = 0; framesPerEncodedSample = 0;
bufferBytesRemaining = 0; currentBuffer = null;
startMediaTimeState = START_NOT_SET; startMediaTimeState = START_NOT_SET;
latencyUs = 0; latencyUs = 0;
resetSyncParams(); resetSyncParams();

View File

@@ -190,8 +190,7 @@ public abstract class AudioDecoderTrackRenderer extends TrackRenderer implements
} }
int handleBufferResult; int handleBufferResult;
handleBufferResult = audioTrack.handleBuffer(outputBuffer.data, outputBuffer.data.position(), handleBufferResult = audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timestampUs);
outputBuffer.data.remaining(), outputBuffer.timestampUs);
// If we are out of sync, allow currentPositionUs to jump backwards. // If we are out of sync, allow currentPositionUs to jump backwards.
if ((handleBufferResult & AudioTrack.RESULT_POSITION_DISCONTINUITY) != 0) { if ((handleBufferResult & AudioTrack.RESULT_POSITION_DISCONTINUITY) != 0) {