Make AudioTrack.handleBuffer more sane.

Now uses buffer position + limit in both new and legacy modes.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=120713837
This commit is contained in:
olly 2016-04-25 09:25:21 -07:00 committed by Oliver Woodman
parent d1eb43ad62
commit 5cbf75b619
5 changed files with 68 additions and 58 deletions

View File

@@ -328,9 +328,9 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
@Override
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip)
throws ExoPlaybackException {
if (passthroughEnabled && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
ByteBuffer buffer, int bufferIndex, int bufferFlags, long bufferPresentationTimeUs,
boolean shouldSkip) throws ExoPlaybackException {
if (passthroughEnabled && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// Discard output buffers from the passthrough (raw) decoder containing codec specific data.
codec.releaseOutputBuffer(bufferIndex, false);
return true;
@@ -374,8 +374,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer implem
int handleBufferResult;
try {
handleBufferResult = audioTrack.handleBuffer(
buffer, bufferInfo.offset, bufferInfo.size, bufferInfo.presentationTimeUs);
handleBufferResult = audioTrack.handleBuffer(buffer, bufferPresentationTimeUs);
lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
} catch (AudioTrack.WriteException e) {
notifyAudioTrackWriteError(e);

View File

@@ -798,32 +798,34 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
if (outputIndex < 0) {
outputIndex = codec.dequeueOutputBuffer(outputBufferInfo, getDequeueOutputBufferTimeoutUs());
}
if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
processOutputFormat();
return true;
} else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = codec.getOutputBuffers();
codecCounters.outputBuffersChangedCount++;
return true;
} else if (outputIndex < 0) {
if (codecNeedsEosPropagationWorkaround && (inputStreamEnded
|| codecReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM)) {
processEndOfStream();
if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
processOutputFormat();
return true;
} else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = codec.getOutputBuffers();
codecCounters.outputBuffersChangedCount++;
return true;
} else if (outputIndex < 0) {
if (codecNeedsEosPropagationWorkaround && (inputStreamEnded
|| codecReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM)) {
processEndOfStream();
return true;
}
return false;
} else if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
processEndOfStream();
return false;
} else {
ByteBuffer outputBuffer = outputBuffers[outputIndex];
outputBuffer.position(outputBufferInfo.offset);
outputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size);
}
return false;
}
if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
processEndOfStream();
return false;
}
int decodeOnlyIndex = getDecodeOnlyIndex(outputBufferInfo.presentationTimeUs);
if (processOutputBuffer(positionUs, elapsedRealtimeUs, codec, outputBuffers[outputIndex],
outputBufferInfo, outputIndex, decodeOnlyIndex != -1)) {
outputIndex, outputBufferInfo.flags, outputBufferInfo.presentationTimeUs,
decodeOnlyIndex != -1)) {
onProcessedOutputBuffer(outputBufferInfo.presentationTimeUs);
if (decodeOnlyIndex != -1) {
decodeOnlyPresentationTimestamps.remove(decodeOnlyIndex);
@@ -855,8 +857,8 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
* @throws ExoPlaybackException If an error occurs processing the output buffer.
*/
protected abstract boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs,
MediaCodec codec, ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex,
boolean shouldSkip) throws ExoPlaybackException;
MediaCodec codec, ByteBuffer buffer, int bufferIndex, int bufferFlags,
long bufferPresentationTimeUs, boolean shouldSkip) throws ExoPlaybackException;
/**
* Processes an end of stream signal.

View File

@@ -429,7 +429,8 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
@Override
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo, int bufferIndex, boolean shouldSkip) {
ByteBuffer buffer, int bufferIndex, int bufferFlags, long bufferPresentationTimeUs,
boolean shouldSkip) {
if (shouldSkip) {
skipOutputBuffer(codec, bufferIndex);
consecutiveDroppedFrameCount = 0;
@@ -452,7 +453,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
// Compute how many microseconds it is until the buffer's presentation time.
long elapsedSinceStartOfLoopUs = (SystemClock.elapsedRealtime() * 1000) - elapsedRealtimeUs;
long earlyUs = bufferInfo.presentationTimeUs - positionUs - elapsedSinceStartOfLoopUs;
long earlyUs = bufferPresentationTimeUs - positionUs - elapsedSinceStartOfLoopUs;
// Compute the buffer's desired release time in nanoseconds.
long systemTimeNs = System.nanoTime();
@@ -460,7 +461,7 @@ public class MediaCodecVideoTrackRenderer extends MediaCodecTrackRenderer {
// Apply a timestamp adjustment, if there is one.
long adjustedReleaseTimeNs = frameReleaseTimeHelper.adjustReleaseTime(
bufferInfo.presentationTimeUs, unadjustedFrameReleaseTimeNs);
bufferPresentationTimeUs, unadjustedFrameReleaseTimeNs);
earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
if (earlyUs < -30000) {

View File

@@ -42,7 +42,7 @@ import java.nio.ByteBuffer;
* Before starting playback, specify the input audio format by calling one of the {@link #configure}
* methods and {@link #initialize} the instance, optionally specifying an audio session.
* <p>
* Call {@link #handleBuffer(ByteBuffer, int, int, long)} to write data to play back, and
* Call {@link #handleBuffer(ByteBuffer, long)} to write data to play back, and
* {@link #handleDiscontinuity()} when a buffer is skipped. Call {@link #play()} to start playing
* back written data.
* <p>
@@ -226,7 +226,7 @@ public final class AudioTrack {
private byte[] temporaryBuffer;
private int temporaryBufferOffset;
private int bufferBytesRemaining;
private ByteBuffer currentBuffer;
/**
* Creates an audio track with default audio capabilities (no encoded audio passthrough support).
@@ -533,23 +533,33 @@ public final class AudioTrack {
}
/**
* Attempts to write {@code size} bytes from {@code buffer} at {@code offset} to the audio track.
* Returns a bit field containing {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released
* (due to having been written), and {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was
* discontinuous with previously written data.
* Attempts to write data from a {@link ByteBuffer} to the audio track, starting from its current
* position and ending at its limit (exclusive). The position of the {@link ByteBuffer} is
* advanced by the number of bytes that were successfully written.
* <p>
* Returns a bit field containing {@link #RESULT_BUFFER_CONSUMED} if the data was written in full,
* and {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was discontinuous with previously
* written data.
* <p>
* If the data was not written in full then the same {@link ByteBuffer} must be provided to
* subsequent calls until it has been fully consumed, except in the case of an interleaving call
* to {@link #configure(MediaFormat, boolean)} or {@link #reset}.
*
* @param buffer The buffer containing audio data to play back.
* @param offset The offset in the buffer from which to consume data.
* @param size The number of bytes to consume from {@code buffer}.
* @param presentationTimeUs Presentation timestamp of the next buffer in microseconds.
* @return A bit field with {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released, and
* {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was not contiguous with previously
* written data.
* @throws WriteException If an error occurs writing the audio data.
*/
public int handleBuffer(ByteBuffer buffer, int offset, int size, long presentationTimeUs)
throws WriteException {
if (size == 0) {
public int handleBuffer(ByteBuffer buffer, long presentationTimeUs) throws WriteException {
boolean isNewBuffer = currentBuffer == null;
Assertions.checkState(isNewBuffer || currentBuffer == buffer);
currentBuffer = buffer;
int bytesRemaining = buffer.remaining();
if (bytesRemaining == 0) {
currentBuffer = null;
return RESULT_BUFFER_CONSUMED;
}
@@ -570,11 +580,8 @@ public final class AudioTrack {
}
int result = 0;
if (bufferBytesRemaining == 0) {
// The previous buffer (if there was one) was fully written to the audio track. We're now
// seeing a new buffer for the first time.
bufferBytesRemaining = size;
buffer.position(offset);
if (isNewBuffer) {
// We're seeing this buffer for the first time.
if (passthrough && framesPerEncodedSample == 0) {
// If this is the first encoded sample, calculate the sample size in frames.
framesPerEncodedSample = getFramesPerEncodedSample(encoding, buffer);
@@ -602,10 +609,12 @@ public final class AudioTrack {
}
if (Util.SDK_INT < 21) {
// Copy {@code buffer} into {@code temporaryBuffer}.
if (temporaryBuffer == null || temporaryBuffer.length < size) {
temporaryBuffer = new byte[size];
if (temporaryBuffer == null || temporaryBuffer.length < bytesRemaining) {
temporaryBuffer = new byte[bytesRemaining];
}
buffer.get(temporaryBuffer, 0, size);
int originalPosition = buffer.position();
buffer.get(temporaryBuffer, 0, bytesRemaining);
buffer.position(originalPosition);
temporaryBufferOffset = 0;
}
}
@@ -617,36 +626,36 @@ public final class AudioTrack {
(int) (submittedPcmBytes - (audioTrackUtil.getPlaybackHeadPosition() * pcmFrameSize));
int bytesToWrite = bufferSize - bytesPending;
if (bytesToWrite > 0) {
bytesToWrite = Math.min(bufferBytesRemaining, bytesToWrite);
bytesToWrite = Math.min(bytesRemaining, bytesToWrite);
bytesWritten = audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
if (bytesWritten >= 0) {
temporaryBufferOffset += bytesWritten;
}
buffer.position(buffer.position() + bytesWritten);
}
} else {
bytesWritten = writeNonBlockingV21(audioTrack, buffer, bufferBytesRemaining);
bytesWritten = writeNonBlockingV21(audioTrack, buffer, bytesRemaining);
}
if (bytesWritten < 0) {
throw new WriteException(bytesWritten);
}
bufferBytesRemaining -= bytesWritten;
if (!passthrough) {
submittedPcmBytes += bytesWritten;
}
if (bufferBytesRemaining == 0) {
if (bytesWritten == bytesRemaining) {
if (passthrough) {
submittedEncodedFrames += framesPerEncodedSample;
}
currentBuffer = null;
result |= RESULT_BUFFER_CONSUMED;
}
return result;
}
/**
* Ensures that the last data passed to {@link #handleBuffer(ByteBuffer, int, int, long)} is
* played out in full.
* Ensures that the last data passed to {@link #handleBuffer(ByteBuffer, long)} is played in full.
*/
public void handleEndOfStream() {
if (isInitialized()) {
@@ -730,7 +739,7 @@ public final class AudioTrack {
submittedPcmBytes = 0;
submittedEncodedFrames = 0;
framesPerEncodedSample = 0;
bufferBytesRemaining = 0;
currentBuffer = null;
startMediaTimeState = START_NOT_SET;
latencyUs = 0;
resetSyncParams();

View File

@@ -190,8 +190,7 @@ public abstract class AudioDecoderTrackRenderer extends TrackRenderer implements
}
int handleBufferResult;
handleBufferResult = audioTrack.handleBuffer(outputBuffer.data, outputBuffer.data.position(),
outputBuffer.data.remaining(), outputBuffer.timestampUs);
handleBufferResult = audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timestampUs);
// If we are out of sync, allow currentPositionUs to jump backwards.
if ((handleBufferResult & AudioTrack.RESULT_POSITION_DISCONTINUITY) != 0) {