FLV Support - Added Audio and Script Data readers

This commit is contained in:
joli 2015-09-29 03:30:57 +02:00
parent de68c982ab
commit 8ddc73511e
8 changed files with 793 additions and 0 deletions

View File

@@ -145,6 +145,9 @@ import java.util.Locale;
"http://storage.googleapis.com/exoplayer-test-media-0/play.mp3", PlayerActivity.TYPE_OTHER),
new Sample("Google Glass (WebM Video with Vorbis Audio)",
"http://demos.webmproject.org/exoplayer/glass_vp9_vorbis.webm", PlayerActivity.TYPE_OTHER),
new Sample("FLV Sample",
"http://master255.org/res/%D0%9A%D0%BB%D0%B8%D0%BF%D1%8B/B/Black%20Eyed%20Peas/black%20ey"
+ "ed%20peas-My%20Humps.flv", PlayerActivity.TYPE_OTHER),
};
private Samples() {}

View File

@@ -146,6 +146,13 @@ public final class ExtractorSampleSource implements SampleSource, SampleSourceRe
} catch (ClassNotFoundException e) {
// Extractor not found.
}
try {
DEFAULT_EXTRACTOR_CLASSES.add(
Class.forName("com.google.android.exoplayer.extractor.flv.FlvExtractor")
.asSubclass(Extractor.class));
} catch (ClassNotFoundException e) {
// Extractor not found.
}
}
private final ExtractorHolder extractorHolder;

View File

@@ -0,0 +1,150 @@
package com.google.android.exoplayer.extractor.flv;
import android.util.Log;
import android.util.Pair;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.extractor.TrackOutput;
import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.ParsableBitArray;
import com.google.android.exoplayer.util.ParsableByteArray;
import java.util.Collections;
/**
 * Parses audio tags from an FLV stream and extracts audio samples.
 */
public class AudioTagReader extends TagReader {
private static final String TAG = "AudioTagReader";
// Sound format
private static final int AUDIO_FORMAT_LINEAR_PCM_PLATFORM_ENDIAN = 0;
private static final int AUDIO_FORMAT_ADPCM = 1;
private static final int AUDIO_FORMAT_MP3 = 2;
private static final int AUDIO_FORMAT_LINEAR_PCM_LITTLE_ENDIAN = 3;
private static final int AUDIO_FORMAT_NELLYMOSER_16KHZ_MONO = 4;
private static final int AUDIO_FORMAT_NELLYMOSER_8KHZ_MONO = 5;
private static final int AUDIO_FORMAT_NELLYMOSER = 6;
private static final int AUDIO_FORMAT_G711_A_LAW = 7;
private static final int AUDIO_FORMAT_G711_MU_LAW = 8;
private static final int AUDIO_FORMAT_RESERVED = 9;
private static final int AUDIO_FORMAT_AAC = 10;
private static final int AUDIO_FORMAT_SPEEX = 11;
private static final int AUDIO_FORMAT_MP3_8KHZ = 14;
private static final int AUDIO_FORMAT_DEVICE_SPECIFIC = 15;
// AAC PACKET TYPE
private static final int AAC_PACKET_TYPE_SEQUENCE_HEADER = 0;
private static final int AAC_PACKET_TYPE_AAC_RAW = 1;
// Sampling rates in Hz, indexed by the 2-bit soundRate field.
private static final int[] AUDIO_SAMPLING_RATE_TABLE = new int[] {
5512, 11025, 22050, 44100
};
private int format;
private int sampleRate;
private int bitsPerSample;
private int channels;
private boolean hasParsedAudioData;
private boolean hasOutputFormat;
/**
* @param output A {@link TrackOutput} to which samples should be written.
*/
public AudioTagReader(TrackOutput output) {
super(output);
}
@Override
public void seek() {
}
@Override
protected void parseHeader(ParsableByteArray data) throws UnsupportedTrack {
if (!hasParsedAudioData) {
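// Audio tag header byte layout (FLV spec): upper 4 bits hold the sound format, the next 2 bits
// the sound rate index, 1 bit the sample size (0 = 8-bit, 1 = 16-bit) and 1 bit the channel
// type (0 = mono, 1 = stereo).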
int header = data.readUnsignedByte();
int soundFormat = (header >> 4) & 0x0F;
int sampleRateIndex = (header >> 2) & 0x03;
int bitsPerSample = (header & 0x02) == 0x02 ? 16 : 8;
int channels = (header & 0x01) + 1;
if (sampleRateIndex < 0 || sampleRateIndex >= AUDIO_SAMPLING_RATE_TABLE.length) {
throw new UnsupportedTrack("Invalid sample rate for the audio track");
}
if (!hasOutputFormat) {
switch (soundFormat) {
// Raw PCM audio: just output the media format.
case AUDIO_FORMAT_LINEAR_PCM_LITTLE_ENDIAN:
output.format(MediaFormat.createAudioFormat(MimeTypes.AUDIO_RAW, MediaFormat.NO_VALUE,
MediaFormat.NO_VALUE, MediaFormat.NO_VALUE, channels,
AUDIO_SAMPLING_RATE_TABLE[sampleRateIndex], null, null));
hasOutputFormat = true;
break;
case AUDIO_FORMAT_AAC:
break;
case AUDIO_FORMAT_MP3:
case AUDIO_FORMAT_MP3_8KHZ:
case AUDIO_FORMAT_LINEAR_PCM_PLATFORM_ENDIAN:
default:
throw new UnsupportedTrack("Audio track not supported. Format: " + soundFormat +
", Sample rate: " + sampleRateIndex + ", bps: " + bitsPerSample + ", channels: " +
channels);
}
}
this.format = soundFormat;
this.sampleRate = AUDIO_SAMPLING_RATE_TABLE[sampleRateIndex];
this.bitsPerSample = bitsPerSample;
this.channels = channels;
hasParsedAudioData = true;
} else {
data.skipBytes(1);
}
}
@Override
protected void parsePayload(ParsableByteArray data, long timeUs) {
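// The first byte of an AAC audio payload is the AACPacketType: 0 signals an
// AudioSpecificConfig (sequence header), 1 signals a raw AAC frame.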
int packetType = data.readUnsignedByte();
if (packetType == AAC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) {
ParsableBitArray adtsScratch = new ParsableBitArray(new byte[data.bytesLeft()]);
data.readBytes(adtsScratch.data, 0, data.bytesLeft());
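// AudioSpecificConfig (ISO/IEC 14496-3) starts with 5 bits of audioObjectType, 4 bits of
// samplingFrequencyIndex and 4 bits of channelConfiguration.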
int audioObjectType = adtsScratch.readBits(5);
int sampleRateIndex = adtsScratch.readBits(4);
int channelConfig = adtsScratch.readBits(4);
byte[] audioSpecificConfig = CodecSpecificDataUtil.buildAacAudioSpecificConfig(
audioObjectType, sampleRateIndex, channelConfig);
Pair<Integer, Integer> audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig(
audioSpecificConfig);
MediaFormat mediaFormat = MediaFormat.createAudioFormat(MimeTypes.AUDIO_AAC,
MediaFormat.NO_VALUE, MediaFormat.NO_VALUE, durationUs, audioParams.second,
audioParams.first, Collections.singletonList(audioSpecificConfig), null);
output.format(mediaFormat);
hasOutputFormat = true;
} else if (packetType == AAC_PACKET_TYPE_AAC_RAW) {
int bytesToWrite = data.bytesLeft();
output.sampleData(data, bytesToWrite);
output.sampleMetadata(timeUs, C.SAMPLE_FLAG_SYNC, bytesToWrite, 0, null);
Log.d(TAG, "AAC TAG. Size: " + bytesToWrite + ", timeUs: " + timeUs);
}
}
@Override
protected boolean shouldParsePayload() {
return (format == AUDIO_FORMAT_AAC);
}
}

View File

@@ -0,0 +1,236 @@
package com.google.android.exoplayer.extractor.flv;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.extractor.Extractor;
import com.google.android.exoplayer.extractor.ExtractorInput;
import com.google.android.exoplayer.extractor.ExtractorOutput;
import com.google.android.exoplayer.extractor.PositionHolder;
import com.google.android.exoplayer.extractor.SeekMap;
import com.google.android.exoplayer.extractor.TrackOutput;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.ParsableByteArray;
import com.google.android.exoplayer.util.Util;
import java.io.EOFException;
import java.io.IOException;
/**
 * Extracts data from the FLV container format.
 */
public final class FlvExtractor implements Extractor {
// Header sizes
private static final int FLV_MIN_HEADER_SIZE = 9;
private static final int FLV_TAG_HEADER_SIZE = 11;
// Parser states.
private static final int STATE_READING_TAG_HEADER = 1;
private static final int STATE_READING_SAMPLE = 2;
// Tag types
private static final int TAG_TYPE_AUDIO = 8;
private static final int TAG_TYPE_VIDEO = 9;
private static final int TAG_TYPE_SCRIPT_DATA = 18;
private static final int FLV_TAG = Util.getIntegerCodeForString("FLV");
private final ParsableByteArray scratch;
private final ParsableByteArray headerBuffer;
private final ParsableByteArray tagHeaderBuffer;
private ParsableByteArray tagData;
// Extractor outputs.
private ExtractorOutput extractorOutput;
private TrackOutput trackOutput;
private boolean hasAudio;
private boolean hasVideo;
private int dataOffset;
private int parserState;
private TagHeader currentTagHeader;
private AudioTagReader audioReader;
private VideoTagReader videoReader;
private MetadataReader metadataReader;
public FlvExtractor() {
scratch = new ParsableByteArray(4);
headerBuffer = new ParsableByteArray(FLV_MIN_HEADER_SIZE);
tagHeaderBuffer = new ParsableByteArray(FLV_TAG_HEADER_SIZE);
dataOffset = 0;
hasAudio = false;
hasVideo = false;
currentTagHeader = new TagHeader();
}
@Override
public void init(ExtractorOutput output) {
this.extractorOutput = output;
trackOutput = extractorOutput.track(0);
extractorOutput.endTracks();
output.seekMap(SeekMap.UNSEEKABLE);
}
@Override
public boolean sniff(ExtractorInput input) throws IOException, InterruptedException {
// Check if file starts with "FLV" tag
input.peekFully(scratch.data, 0, 3);
scratch.setPosition(0);
if (scratch.readUnsignedInt24() != FLV_TAG) {
return false;
}
/*
// Checking reserved flags are set to 0
input.peekFully(scratch.data, 0, 2);
scratch.setPosition(0);
if ((scratch.readUnsignedShort() & 0xFA) != 0) {
return false;
}
// Read data offset
input.peekFully(scratch.data, 0, 4);
scratch.setPosition(0);
int dataOffset = scratch.readInt();
input.resetPeekPosition();
input.advancePeekPosition(dataOffset);
// Checking first "previous tag size" is set to 0
input.peekFully(scratch.data, 0, 1);
scratch.setPosition(0);
if (scratch.readInt() != 0) {
return false;
}
*/
return true;
}
@Override
public int read(ExtractorInput input, PositionHolder seekPosition) throws IOException,
InterruptedException {
if (dataOffset == 0
&& !readHeader(input)) {
return RESULT_END_OF_INPUT;
}
try {
while (true) {
switch (parserState) {
case STATE_READING_TAG_HEADER:
if (!readTagHeader(input)) {
return RESULT_END_OF_INPUT;
}
break;
default:
return readSample(input, seekPosition);
}
}
} catch (TagReader.UnsupportedTrack unsupportedTrack) {
unsupportedTrack.printStackTrace();
return RESULT_END_OF_INPUT;
}
}
@Override
public void seek() {
dataOffset = 0;
}
private boolean readHeader(ExtractorInput input) throws IOException, InterruptedException {
try {
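// FLV file header: "FLV" signature (3 bytes), version (1 byte), type flags (1 byte, bit 2 set
// if audio is present, bit 0 set if video is present) and a 4-byte data offset.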
input.readFully(headerBuffer.data, 0, FLV_MIN_HEADER_SIZE);
headerBuffer.setPosition(0);
headerBuffer.skipBytes(4);
int flags = headerBuffer.readUnsignedByte();
hasAudio = (flags & 0x04) != 0;
hasVideo = (flags & 0x01) != 0;
if (hasAudio) {
audioReader = new AudioTagReader(trackOutput);
}
if (hasVideo) {
//videoReader = new VideoTagReader(trackOutput);
}
metadataReader = new MetadataReader(trackOutput);
dataOffset = headerBuffer.readInt();
input.skipFully(dataOffset - FLV_MIN_HEADER_SIZE);
parserState = STATE_READING_TAG_HEADER;
} catch (EOFException eof) {
return false;
}
return true;
}
private boolean readTagHeader(ExtractorInput input) throws IOException, InterruptedException,
TagReader.UnsupportedTrack {
try {
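// Every tag is preceded by a 4-byte previousTagSize field. The 11-byte tag header contains the
// tag type (1 byte), payload size (3 bytes), timestamp in milliseconds (3 bytes plus 1 extended
// byte carrying the upper 8 bits) and a stream id (3 bytes, always 0).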
input.skipFully(4); // Skip the previousTagSize field.
input.readFully(tagHeaderBuffer.data, 0, FLV_TAG_HEADER_SIZE);
tagHeaderBuffer.setPosition(0);
int type = tagHeaderBuffer.readUnsignedByte();
int dataSize = tagHeaderBuffer.readUnsignedInt24();
long timestamp = tagHeaderBuffer.readUnsignedInt24();
timestamp = (tagHeaderBuffer.readUnsignedByte() << 24) | timestamp;
int streamId = tagHeaderBuffer.readUnsignedInt24();
currentTagHeader.type = type;
currentTagHeader.dataSize = dataSize;
currentTagHeader.timestamp = timestamp * 1000;
currentTagHeader.streamId = streamId;
Assertions.checkState(dataSize <= Integer.MAX_VALUE);
tagData = new ParsableByteArray((int) dataSize);
parserState = STATE_READING_SAMPLE;
} catch (EOFException eof) {
return false;
}
return true;
}
private int readSample(ExtractorInput input, PositionHolder seekPosition) throws IOException,
InterruptedException, TagReader.UnsupportedTrack {
if (tagData != null) {
if (!input.readFully(tagData.data, 0, currentTagHeader.dataSize, true)) {
return RESULT_END_OF_INPUT;
}
tagData.setPosition(0);
} else {
input.skipFully(currentTagHeader.dataSize);
return RESULT_CONTINUE;
}
if (currentTagHeader.type == TAG_TYPE_AUDIO && audioReader != null) {
audioReader.consume(tagData, currentTagHeader.timestamp);
} else if (currentTagHeader.type == TAG_TYPE_VIDEO && videoReader != null) {
videoReader.consume(tagData, currentTagHeader.timestamp);
} else if (currentTagHeader.type == TAG_TYPE_SCRIPT_DATA && metadataReader != null) {
metadataReader.consume(tagData, currentTagHeader.timestamp);
if (metadataReader.durationUs != C.UNKNOWN_TIME_US) {
if (audioReader != null) {
audioReader.durationUs = metadataReader.durationUs;
}
if (videoReader != null) {
videoReader.durationUs = metadataReader.durationUs;
}
}
} else {
tagData.reset();
}
parserState = STATE_READING_TAG_HEADER;
return RESULT_CONTINUE;
}
}

View File

@@ -0,0 +1,203 @@
package com.google.android.exoplayer.extractor.flv;
import android.util.Log;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.extractor.TrackOutput;
import com.google.android.exoplayer.util.ParsableByteArray;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Parses FLV script data tags (onMetaData) to extract stream metadata such as the duration.
 */
public class MetadataReader extends TagReader {
private static final int METADATA_TYPE_UNKNOWN = -1;
private static final int METADATA_TYPE_NUMBER = 0;
private static final int METADATA_TYPE_BOOLEAN = 1;
private static final int METADATA_TYPE_STRING = 2;
private static final int METADATA_TYPE_OBJECT = 3;
private static final int METADATA_TYPE_MOVIE_CLIP = 4;
private static final int METADATA_TYPE_NULL = 5;
private static final int METADATA_TYPE_UNDEFINED = 6;
private static final int METADATA_TYPE_REFERENCE = 7;
private static final int METADATA_TYPE_ECMA_ARRAY = 8;
private static final int METADATA_TYPE_STRICT_ARRAY = 10;
private static final int METADATA_TYPE_DATE = 11;
private static final int METADATA_TYPE_LONG_STRING = 12;
public long startTime = C.UNKNOWN_TIME_US;
public float frameRate;
public float videoDataRate;
public float audioDataRate;
public int height;
public int width;
public boolean canSeekOnTime;
public String httpHostHeader;
/**
* @param output A {@link TrackOutput} to which samples should be written.
*/
public MetadataReader(TrackOutput output) {
super(output);
}
@Override
public void seek() {
}
@Override
protected void parseHeader(ParsableByteArray data) throws UnsupportedTrack {
}
@Override
protected void parsePayload(ParsableByteArray data, long timeUs) {
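// A script data tag holds two AMF0 values: a string name (typically "onMetaData") followed by
// an ECMA array of metadata properties.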
Object messageName = readAMFData(data, METADATA_TYPE_UNKNOWN);
Object obj = readAMFData(data, METADATA_TYPE_UNKNOWN);
if (obj instanceof Map) {
Map<String, Object> extractedMetadata = (Map<String, Object>) obj;
for (Map.Entry<String, Object> entry : extractedMetadata.entrySet()) {
if (entry.getValue() == null) {
continue;
}
Log.d("Metadata", "Key: " + entry.getKey() + ", Value: " + entry.getValue().toString());
switch (entry.getKey()) {
case "totalduration":
this.durationUs = (long)(C.MICROS_PER_SECOND * (Double)(entry.getValue()));
break;
case "starttime":
this.startTime = (long)(C.MICROS_PER_SECOND * (Double)(entry.getValue()));
break;
case "videodatarate":
this.videoDataRate = ((Double)entry.getValue()).floatValue();
break;
case "audiodatarate":
this.audioDataRate = ((Double)entry.getValue()).floatValue();
break;
case "framerate":
this.frameRate = ((Double)entry.getValue()).floatValue();
break;
case "width":
this.width = Math.round(((Double) entry.getValue()).floatValue());
break;
case "height":
this.height = Math.round(((Double) entry.getValue()).floatValue());
break;
case "canseekontime":
this.canSeekOnTime = (boolean) entry.getValue();
break;
case "httphostheader":
this.httpHostHeader = (String) entry.getValue();
break;
default:
break;
}
}
}
}
@Override
protected boolean shouldParsePayload() {
return true;
}
private Object readAMFData(ParsableByteArray data, int type) {
if (type == METADATA_TYPE_UNKNOWN) {
type = data.readUnsignedByte();
}
switch (type) {
case METADATA_TYPE_NUMBER:
return readAMFDouble(data);
case METADATA_TYPE_BOOLEAN:
return readAMFBoolean(data);
case METADATA_TYPE_STRING:
return readAMFString(data);
case METADATA_TYPE_OBJECT:
return readAMFObject(data);
case METADATA_TYPE_ECMA_ARRAY:
return readAMFEcmaArray(data);
case METADATA_TYPE_STRICT_ARRAY:
return readAMFStrictArray(data);
case METADATA_TYPE_DATE:
return readAMFDate(data);
default:
return null;
}
}
private Boolean readAMFBoolean(ParsableByteArray data) {
return Boolean.valueOf(data.readUnsignedByte() == 1);
}
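// An AMF0 number is an 8-byte IEEE 754 double, big-endian.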
private Double readAMFDouble(ParsableByteArray data) {
byte []b = new byte[8];
data.readBytes(b, 0, b.length);
return ByteBuffer.wrap(b).getDouble();
}
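// An AMF0 string is a 16-bit big-endian length followed by that many UTF-8 encoded bytes.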
private String readAMFString(ParsableByteArray data) {
int size = data.readUnsignedShort();
byte []b = new byte[size];
data.readBytes(b, 0, b.length);
return new String(b, Charset.forName("UTF-8"));
}
private Object readAMFStrictArray(ParsableByteArray data) {
long count = data.readUnsignedInt();
ArrayList<Object> list = new ArrayList<Object>();
for (int i = 0; i < count; i++) {
list.add(readAMFData(data, METADATA_TYPE_UNKNOWN));
}
return list;
}
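// An AMF0 object is a sequence of (string key, typed value) pairs, terminated by an empty key
// followed by the object-end marker (type 9).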
private Object readAMFObject(ParsableByteArray data) {
HashMap<String, Object> array = new HashMap<String, Object>();
while (true) {
String key = readAMFString(data);
int type = data.readUnsignedByte();
if (type == 9) { // object end marker
break;
}
array.put(key, readAMFData(data, type));
}
return array;
}
private Object readAMFEcmaArray(ParsableByteArray data) {
long count = data.readUnsignedInt();
HashMap<String, Object> array = new HashMap<String, Object>();
for (int i = 0; i < count; i++) {
String key = readAMFString(data);
int type = data.readUnsignedByte();
array.put(key, readAMFData(data, type));
}
return array;
}
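// An AMF0 date is an 8-byte double holding milliseconds since the epoch, followed by a 2-byte
// time-zone offset that is reserved and skipped here.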
private Date readAMFDate(ParsableByteArray data) {
final Date date = new Date((long) readAMFDouble(data).doubleValue());
data.readUnsignedShort();
return date;
}
}

View File

@@ -0,0 +1,15 @@
package com.google.android.exoplayer.extractor.flv;
/**
 * Holds the fields of an FLV tag header.
 */
final class TagHeader {
public static final int TAG_TYPE_AUDIO = 8;
public static final int TAG_TYPE_VIDEO = 9;
public static final int TAG_TYPE_SCRIPT_DATA = 18;
public int type;
public int dataSize;
public long timestamp;
public int streamId;
}

View File

@@ -0,0 +1,75 @@
package com.google.android.exoplayer.extractor.flv;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.extractor.TrackOutput;
import com.google.android.exoplayer.util.ParsableByteArray;
/**
* Extracts individual samples from FLV tags.
*/
/* package */ abstract class TagReader {
protected final TrackOutput output;
public long durationUs;
/**
* @param output A {@link TrackOutput} to which samples should be written.
*/
protected TagReader(TrackOutput output) {
this.output = output;
this.durationUs = C.UNKNOWN_TIME_US;
}
/**
* Notifies the reader that a seek has occurred.
* <p>
* Following a call to this method, the data passed to the next invocation of
* {@link #consume(ParsableByteArray, long)} will not be a continuation of the data that
* was previously passed. Hence the reader should reset any internal state.
*/
public abstract void seek();
/**
* Parses the tag header.
* @param data Buffer holding the tag header.
*/
protected abstract void parseHeader(ParsableByteArray data) throws UnsupportedTrack;
/**
* Parses the tag payload.
* @param data Buffer holding the tag payload.
* @param timeUs The timestamp of the sample, in microseconds.
*/
protected abstract void parsePayload(ParsableByteArray data, long timeUs);
/**
* Evaluates whether the payload of the current tag should be parsed.
* @return True if the payload should be parsed, false otherwise.
*/
protected abstract boolean shouldParsePayload();
/**
* Consumes the data of a tag, parsing its header and, if applicable, its payload.
*
* @param data The tag data to consume.
* @param timeUs The timestamp associated with the tag, in microseconds.
*/
public void consume(ParsableByteArray data, long timeUs) throws UnsupportedTrack {
parseHeader(data);
if (shouldParsePayload()) {
parsePayload(data, timeUs);
}
}
/**
* Thrown when the track format described by a tag is not supported.
*/
public static final class UnsupportedTrack extends Exception {
public UnsupportedTrack(String msg) {
super(msg);
}
}
}

View File

@@ -0,0 +1,104 @@
package com.google.android.exoplayer.extractor.flv;
import android.util.Log;
import android.util.Pair;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.extractor.TrackOutput;
import com.google.android.exoplayer.util.CodecSpecificDataUtil;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.ParsableBitArray;
import com.google.android.exoplayer.util.ParsableByteArray;
import java.util.Collections;
/**
 * Parses video tags from an FLV stream and extracts H.264 video samples.
 */
public class VideoTagReader extends TagReader {
private static final String TAG = "VideoTagReader";
// Video codec
private static final int VIDEO_CODEC_JPEG = 1;
private static final int VIDEO_CODEC_H263 = 2;
private static final int VIDEO_CODEC_SCREEN_VIDEO = 3;
private static final int VIDEO_CODEC_VP6 = 4;
private static final int VIDEO_CODEC_VP6_WITH_ALPHA_CHANNEL = 5;
private static final int VIDEO_CODEC_SCREEN_VIDEO_V2 = 6;
private static final int VIDEO_CODEC_AVC = 7;
// FRAME TYPE
private static final int VIDEO_FRAME_KEYFRAME = 1;
private static final int VIDEO_FRAME_INTERFRAME = 2;
private static final int VIDEO_FRAME_DISPOSABLE_INTERFRAME = 3;
private static final int VIDEO_FRAME_GENERATED_KEYFRAME = 4;
private static final int VIDEO_FRAME_VIDEO_INFO = 5;
// PACKET TYPE
private static final int AVC_PACKET_TYPE_SEQUENCE_HEADER = 0;
private static final int AVC_PACKET_TYPE_AVC_NALU = 1;
private static final int AVC_PACKET_TYPE_AVC_END_OF_SEQUENCE = 2;
private boolean hasOutputFormat;
private int format;
private int frameType;
/**
* @param output A {@link TrackOutput} to which samples should be written.
*/
public VideoTagReader(TrackOutput output) {
super(output);
}
@Override
public void seek() {
}
@Override
protected void parseHeader(ParsableByteArray data) throws UnsupportedTrack {
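// The first byte of a video tag: the upper 4 bits hold the frame type, the lower 4 bits the
// codec id.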
int header = data.readUnsignedByte();
int frameType = (header >> 4) & 0x0F;
int videoCodec = (header & 0x0F);
if (videoCodec != VIDEO_CODEC_AVC) {
throw new UnsupportedTrack("Video codec not supported. Codec: " + videoCodec);
}
this.format = videoCodec;
this.frameType = frameType;
}
@Override
protected void parsePayload(ParsableByteArray data, long timeUs) {
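// AVCVIDEOPACKET: a 1-byte AVCPacketType (0 = AVCDecoderConfigurationRecord, 1 = NAL units,
// 2 = end of sequence) followed by a 3-byte composition time offset in milliseconds.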
int packetType = data.readUnsignedByte();
int compositionTime = data.readUnsignedInt24();
if (packetType == AVC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) {
ParsableBitArray videoSequence = new ParsableBitArray(new byte[data.bytesLeft()]);
data.readBytes(videoSequence.data, 0, data.bytesLeft());
/*
// Construct and output the format.
output.format(MediaFormat.createVideoFormat(MimeTypes.VIDEO_H264, MediaFormat.NO_VALUE,
MediaFormat.NO_VALUE, C.UNKNOWN_TIME_US, parsedSpsData.width, parsedSpsData.height,
initializationData, MediaFormat.NO_VALUE, parsedSpsData.pixelWidthAspectRatio));
*/
// output.format(mediaFormat);
hasOutputFormat = true;
} else if (packetType == AVC_PACKET_TYPE_AVC_NALU) {
int bytesToWrite = data.bytesLeft();
output.sampleData(data, bytesToWrite);
output.sampleMetadata(timeUs, frameType == VIDEO_FRAME_KEYFRAME ? C.SAMPLE_FLAG_SYNC : 0,
bytesToWrite, 0, null);
Log.d(TAG, "AAC TAG. Size: " + bytesToWrite + ", timeUs: " + timeUs);
}
}
@Override
protected boolean shouldParsePayload() {
return (format == VIDEO_CODEC_AVC && frameType != VIDEO_FRAME_VIDEO_INFO);
}
}