Mirror of https://github.com/androidx/media.git (synced 2025-05-16 03:59:54 +08:00)
Fix SampleMetadataQueue.getLargestQueuedTimestampUs
This was broken prior to my recent changes, since largestDequeuedTimestampUs was only being updated in readData. It should have been updated in the skip methods as well.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=159704945
This commit is contained in:
parent 531eb15ff4
commit 9bad78dce6
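The idea behind the fix, as a minimal standalone sketch rather than the real SampleMetadataQueue API (the class and method names below, such as TimestampQueueSketch and discardDownstreamSample, are illustrative only): track the largest timestamp removed from the front of the queue no matter whether the sample was read or skipped, and after discarding from the back recompute the largest queued timestamp from the retained samples, scanning back only as far as the previous keyframe.

    import java.util.ArrayDeque;
    import java.util.Iterator;

    final class TimestampQueueSketch {

      private static final class Sample {
        final long timeUs;
        final boolean isKeyframe;

        Sample(long timeUs, boolean isKeyframe) {
          this.timeUs = timeUs;
          this.isKeyframe = isKeyframe;
        }
      }

      private final ArrayDeque<Sample> samples = new ArrayDeque<>();
      // Largest timestamp ever removed from the front (read, skipped or discarded downstream).
      private long largestDiscardedTimestampUs = Long.MIN_VALUE;
      // Largest timestamp written since the last reset, clamped when upstream samples are discarded.
      private long largestQueuedTimestampUs = Long.MIN_VALUE;

      public void queueSample(long timeUs, boolean isKeyframe) {
        samples.addLast(new Sample(timeUs, isKeyframe));
        largestQueuedTimestampUs = Math.max(largestQueuedTimestampUs, timeUs);
      }

      // Removes one sample from the front, whether it was read or merely skipped. This is the
      // bookkeeping that a read-only largestDequeuedTimestampUs missed when samples were skipped.
      public void discardDownstreamSample() {
        Sample sample = samples.removeFirst();
        largestDiscardedTimestampUs = Math.max(largestDiscardedTimestampUs, sample.timeUs);
      }

      // Discards samples from the back until only retainCount remain, then recomputes the largest
      // queued timestamp from what is left, scanning back only to the previous keyframe (valid
      // because timestamps before a keyframe never exceed the keyframe's or later timestamps).
      public void discardUpstreamSamples(int retainCount) {
        while (samples.size() > retainCount) {
          samples.removeLast();
        }
        largestQueuedTimestampUs =
            Math.max(largestDiscardedTimestampUs, largestRetainedTimestampUs());
      }

      public long getLargestQueuedTimestampUs() {
        return largestQueuedTimestampUs;
      }

      private long largestRetainedTimestampUs() {
        long largestUs = Long.MIN_VALUE;
        // Walk backwards from the newest retained sample and stop at the first keyframe.
        for (Iterator<Sample> it = samples.descendingIterator(); it.hasNext(); ) {
          Sample sample = it.next();
          largestUs = Math.max(largestUs, sample.timeUs);
          if (sample.isKeyframe) {
            break;
          }
        }
        return largestUs;
      }
    }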
SampleQueueTest.java

@@ -421,6 +421,43 @@ public class SampleQueueTest extends TestCase {
     assertNoSamplesToRead(TEST_FORMAT_2);
   }
 
+  public void testLargestQueuedTimestampWithDiscardUpstream() {
+    writeTestData();
+    assertEquals(LAST_SAMPLE_TIMESTAMP, sampleQueue.getLargestQueuedTimestampUs());
+    sampleQueue.discardUpstreamSamples(TEST_SAMPLE_TIMESTAMPS.length - 1);
+    // Discarding from upstream should reduce the largest timestamp.
+    assertEquals(TEST_SAMPLE_TIMESTAMPS[TEST_SAMPLE_TIMESTAMPS.length - 2],
+        sampleQueue.getLargestQueuedTimestampUs());
+    sampleQueue.discardUpstreamSamples(0);
+    // Discarding everything from upstream without reading should unset the largest timestamp.
+    assertEquals(Long.MIN_VALUE, sampleQueue.getLargestQueuedTimestampUs());
+  }
+
+  public void testLargestQueuedTimestampWithDiscardUpstreamDecodeOrder() {
+    long[] decodeOrderTimestamps = new long[] {0, 3000, 2000, 1000, 4000, 7000, 6000, 5000};
+    writeTestData(TEST_DATA, TEST_SAMPLE_SIZES, TEST_SAMPLE_OFFSETS, decodeOrderTimestamps,
+        TEST_SAMPLE_FORMATS, TEST_SAMPLE_FLAGS);
+    assertEquals(7000, sampleQueue.getLargestQueuedTimestampUs());
+    sampleQueue.discardUpstreamSamples(TEST_SAMPLE_TIMESTAMPS.length - 2);
+    // Discarding the last two samples should not change the largest timestamp, due to the decode
+    // ordering of the timestamps.
+    assertEquals(7000, sampleQueue.getLargestQueuedTimestampUs());
+    sampleQueue.discardUpstreamSamples(TEST_SAMPLE_TIMESTAMPS.length - 3);
+    // Once a third sample is discarded, the largest timestamp should have changed.
+    assertEquals(4000, sampleQueue.getLargestQueuedTimestampUs());
+    sampleQueue.discardUpstreamSamples(0);
+    // Discarding everything from upstream without reading should unset the largest timestamp.
+    assertEquals(Long.MIN_VALUE, sampleQueue.getLargestQueuedTimestampUs());
+  }
+
+  public void testLargestQueuedTimestampWithRead() {
+    writeTestData();
+    assertEquals(LAST_SAMPLE_TIMESTAMP, sampleQueue.getLargestQueuedTimestampUs());
+    assertReadTestData();
+    // Reading everything should not reduce the largest timestamp.
+    assertEquals(LAST_SAMPLE_TIMESTAMP, sampleQueue.getLargestQueuedTimestampUs());
+  }
+
   // Internal methods.
 
   /**
@@ -428,15 +465,27 @@ public class SampleQueueTest extends TestCase {
    */
-  @SuppressWarnings("ReferenceEquality")
   private void writeTestData() {
-    sampleQueue.sampleData(new ParsableByteArray(TEST_DATA), TEST_DATA.length);
+    writeTestData(TEST_DATA, TEST_SAMPLE_SIZES, TEST_SAMPLE_OFFSETS, TEST_SAMPLE_TIMESTAMPS,
+        TEST_SAMPLE_FORMATS, TEST_SAMPLE_FLAGS);
+  }
+
+  /**
+   * Writes the specified test data to {@code sampleQueue}.
+   */
+  @SuppressWarnings("ReferenceEquality")
+  private void writeTestData(byte[] data, int[] sampleSizes, int[] sampleOffsets,
+      long[] sampleTimestamps, Format[] sampleFormats, int[] sampleFlags) {
+    sampleQueue.sampleData(new ParsableByteArray(data), data.length);
     Format format = null;
-    for (int i = 0; i < TEST_SAMPLE_TIMESTAMPS.length; i++) {
-      if (TEST_SAMPLE_FORMATS[i] != format) {
-        sampleQueue.format(TEST_SAMPLE_FORMATS[i]);
-        format = TEST_SAMPLE_FORMATS[i];
+    for (int i = 0; i < sampleTimestamps.length; i++) {
+      if (sampleFormats[i] != format) {
+        sampleQueue.format(sampleFormats[i]);
+        format = sampleFormats[i];
       }
-      sampleQueue.sampleMetadata(TEST_SAMPLE_TIMESTAMPS[i], TEST_SAMPLE_FLAGS[i],
-          TEST_SAMPLE_SIZES[i], TEST_SAMPLE_OFFSETS[i], null);
+      sampleQueue.sampleMetadata(sampleTimestamps[i], sampleFlags[i], sampleSizes[i],
+          sampleOffsets[i], null);
     }
   }
SampleMetadataQueue.java

@@ -55,7 +55,7 @@ import com.google.android.exoplayer2.util.Util;
   private int relativeStartIndex;
   private int readPosition;
 
-  private long largestDequeuedTimestampUs;
+  private long largestDiscardedTimestampUs;
   private long largestQueuedTimestampUs;
   private boolean upstreamKeyframeRequired;
   private boolean upstreamFormatRequired;
@@ -71,7 +71,7 @@ import com.google.android.exoplayer2.util.Util;
     sizes = new int[capacity];
     cryptoDatas = new CryptoData[capacity];
     formats = new Format[capacity];
-    largestDequeuedTimestampUs = Long.MIN_VALUE;
+    largestDiscardedTimestampUs = Long.MIN_VALUE;
     largestQueuedTimestampUs = Long.MIN_VALUE;
     upstreamFormatRequired = true;
     upstreamKeyframeRequired = true;
@@ -88,7 +88,7 @@ import com.google.android.exoplayer2.util.Util;
   // Called by the consuming thread, but only when there is no loading thread.
 
   public void resetLargestParsedTimestamps() {
-    largestDequeuedTimestampUs = Long.MIN_VALUE;
+    largestDiscardedTimestampUs = Long.MIN_VALUE;
     largestQueuedTimestampUs = Long.MIN_VALUE;
   }
 
@@ -121,16 +121,8 @@ import com.google.android.exoplayer2.util.Util;
 
     length -= discardCount;
     relativeEndIndex = (relativeEndIndex + capacity - discardCount) % capacity;
-    // Update the largest queued timestamp, assuming that the timestamps prior to a keyframe are
-    // always less than the timestamp of the keyframe itself, and of subsequent frames.
-    largestQueuedTimestampUs = Long.MIN_VALUE;
-    for (int i = length - 1; i >= 0; i--) {
-      int sampleIndex = (relativeStartIndex + i) % capacity;
-      largestQueuedTimestampUs = Math.max(largestQueuedTimestampUs, timesUs[sampleIndex]);
-      if ((flags[sampleIndex] & C.BUFFER_FLAG_KEY_FRAME) != 0) {
-        break;
-      }
-    }
+    largestQueuedTimestampUs = Math.max(largestDiscardedTimestampUs,
+        getLargestTimestamp(relativeStartIndex, length));
     return offsets[relativeEndIndex];
   }
 
@@ -182,7 +174,7 @@ import com.google.android.exoplayer2.util.Util;
    * samples have been queued.
    */
   public synchronized long getLargestQueuedTimestampUs() {
-    return Math.max(largestDequeuedTimestampUs, largestQueuedTimestampUs);
+    return largestQueuedTimestampUs;
   }
 
   /**
@@ -246,7 +238,6 @@ import com.google.android.exoplayer2.util.Util;
     extrasHolder.offset = offsets[relativeReadIndex];
     extrasHolder.cryptoData = cryptoDatas[relativeReadIndex];
 
-    largestDequeuedTimestampUs = Math.max(largestDequeuedTimestampUs, buffer.timeUs);
     readPosition++;
     return C.RESULT_BUFFER_READ;
   }
@@ -420,14 +411,16 @@ import com.google.android.exoplayer2.util.Util;
   }
 
   /**
-   * Attempts to discard samples from the tail of the queue to allow samples starting from the
-   * specified timestamp to be spliced in.
+   * Attempts to discard samples from the end of the queue to allow samples starting from the
+   * specified timestamp to be spliced in. Samples will not be discarded prior to the read position.
    *
    * @param timeUs The timestamp at which the splice occurs.
    * @return Whether the splice was successful.
    */
   public synchronized boolean attemptSplice(long timeUs) {
-    if (largestDequeuedTimestampUs >= timeUs) {
+    long largestReadTimestampUs = Math.max(largestDiscardedTimestampUs,
+        getLargestTimestamp(relativeStartIndex, readPosition));
+    if (largestReadTimestampUs >= timeUs) {
       return false;
     }
     int retainCount = length;
@@ -476,6 +469,8 @@ import com.google.android.exoplayer2.util.Util;
    * {@link C#POSITION_UNSET} if no discarding of data is necessary.
    */
   private long discardSamples(int discardCount) {
+    largestDiscardedTimestampUs = Math.max(largestDiscardedTimestampUs,
+        getLargestTimestamp(relativeStartIndex, discardCount));
     length -= discardCount;
     absoluteStartIndex += discardCount;
     relativeStartIndex += discardCount;
@@ -494,4 +489,24 @@ import com.google.android.exoplayer2.util.Util;
     }
   }
 
+  /**
+   * Finds the largest timestamp in the specified range, assuming that the timestamps prior to a
+   * keyframe are always less than the timestamp of the keyframe itself, and of subsequent frames.
+   *
+   * @param relativeStartIndex The relative index from which to start searching.
+   * @param length The length of the range being searched.
+   * @return The largest timestamp, or {@link Long#MIN_VALUE} if {@code length <= 0}.
+   */
+  private long getLargestTimestamp(int relativeStartIndex, int length) {
+    long largestTimestampUs = Long.MIN_VALUE;
+    for (int i = length - 1; i >= 0; i--) {
+      int sampleIndex = (relativeStartIndex + i) % capacity;
+      largestTimestampUs = Math.max(largestTimestampUs, timesUs[sampleIndex]);
+      if ((flags[sampleIndex] & C.BUFFER_FLAG_KEY_FRAME) != 0) {
+        break;
+      }
+    }
+    return largestTimestampUs;
+  }
+
 }
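For intuition about the decode-order test added above, here is a hypothetical walk-through using the TimestampQueueSketch from the commit description. The keyframe positions (indices 0 and 4, i.e. every fourth sample) are an assumption chosen to be consistent with the test's assertions, not the actual TEST_SAMPLE_FLAGS values, and the retain-count argument is the sketch's own convention rather than the real discardUpstreamSamples signature.

    // Hypothetical usage of TimestampQueueSketch; keyframe positions are assumed.
    final class DecodeOrderExample {
      public static void main(String[] args) {
        long[] decodeOrderTimestampsUs = {0, 3000, 2000, 1000, 4000, 7000, 6000, 5000};
        TimestampQueueSketch queue = new TimestampQueueSketch();
        for (int i = 0; i < decodeOrderTimestampsUs.length; i++) {
          queue.queueSample(decodeOrderTimestampsUs[i], i % 4 == 0); // keyframes at 0 and 4 (assumed)
        }
        System.out.println(queue.getLargestQueuedTimestampUs()); // 7000
        queue.discardUpstreamSamples(6); // drop 6000 and 5000; 7000 is still queued
        System.out.println(queue.getLargestQueuedTimestampUs()); // 7000
        queue.discardUpstreamSamples(5); // drop 7000 as well; the scan stops at the 4000 keyframe
        System.out.println(queue.getLargestQueuedTimestampUs()); // 4000
        queue.discardUpstreamSamples(0); // discard everything without reading
        System.out.println(queue.getLargestQueuedTimestampUs()); // Long.MIN_VALUE
      }
    }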