queuedOutputBuffers;
+ private final InputBuffer[] availableInputBuffers;
+ private final OutputBuffer[] availableOutputBuffers;
+ private int availableInputBufferCount;
+ private int availableOutputBufferCount;
+
+ private int skipSamples;
+ private boolean flushDecodedOutputBuffer;
+ private boolean released;
+
+ private int seekPreRoll;
+
+ private OpusDecoderException decoderException;
+
/**
 * @param headerBytes Opus header data that is used to initialize the decoder. For WebM Container,
 *     this comes from the CodecPrivate Track element.
 * @param codecDelayNs Delay in nanoseconds added by the codec at the beginning. For WebM
 *     Container, this comes from the CodecDelay Track Element. Can be -1 in which case the value
 *     from the codec header will be used.
 * @param seekPreRollNs Duration in nanoseconds of samples to discard when there is a
 *     discontinuity. For WebM Container, this comes from the SeekPreRoll Track Element. Can be -1
 *     in which case {@code DEFAULT_SEEK_PRE_ROLL} will be used. (The original doc said "80ns";
 *     the Opus/WebM convention is 80ms — TODO confirm against DEFAULT_SEEK_PRE_ROLL.)
 * @throws OpusDecoderException if an exception occurs when initializing the decoder.
 */
public OpusDecoderWrapper(byte[] headerBytes, long codecDelayNs,
    long seekPreRollNs) throws OpusDecoderException {
  lock = new Object();
  opusHeader = parseOpusHeader(headerBytes);
  skipSamples = (codecDelayNs == -1) ? opusHeader.skipSamples : nsToSamples(codecDelayNs);
  // Bug fix: compare the seekPreRollNs parameter, not the seekPreRoll field. The field is still
  // zero at this point, so the original condition was always false and -1 could be passed to
  // nsToSamples(), producing a negative pre-roll.
  seekPreRoll = (seekPreRollNs == -1) ? DEFAULT_SEEK_PRE_ROLL : nsToSamples(seekPreRollNs);
  queuedInputBuffers = new LinkedList<>();
  queuedOutputBuffers = new LinkedList<>();
  availableInputBuffers = new InputBuffer[NUM_BUFFERS];
  availableOutputBuffers = new OutputBuffer[NUM_BUFFERS];
  availableInputBufferCount = NUM_BUFFERS;
  availableOutputBufferCount = NUM_BUFFERS;
  for (int i = 0; i < NUM_BUFFERS; i++) {
    availableInputBuffers[i] = new InputBuffer();
    availableOutputBuffers[i] = new OutputBuffer();
  }
}
+
+ public InputBuffer getInputBuffer() throws OpusDecoderException {
+ synchronized (lock) {
+ maybeThrowDecoderError();
+ if (availableInputBufferCount == 0) {
+ return null;
+ }
+ InputBuffer inputBuffer = availableInputBuffers[--availableInputBufferCount];
+ inputBuffer.reset();
+ return inputBuffer;
+ }
+ }
+
/**
 * Submits a filled input buffer for decoding, waking the decode loop if a decode can now
 * proceed.
 *
 * @param inputBuffer A buffer previously obtained from {@link #getInputBuffer()}.
 * @throws OpusDecoderException if the decode thread has previously failed.
 */
public void queueInputBuffer(InputBuffer inputBuffer) throws OpusDecoderException {
  synchronized (lock) {
    maybeThrowDecoderError();
    queuedInputBuffers.addLast(inputBuffer);
    maybeNotifyDecodeLoop();
  }
}
+
+ public OutputBuffer dequeueOutputBuffer() throws OpusDecoderException {
+ synchronized (lock) {
+ maybeThrowDecoderError();
+ if (queuedOutputBuffers.isEmpty()) {
+ return null;
+ }
+ return queuedOutputBuffers.removeFirst();
+ }
+ }
+
/**
 * Returns a consumed output buffer to the available pool, waking the decode loop if a decode
 * can now proceed.
 *
 * @param outputBuffer A buffer previously obtained from {@link #dequeueOutputBuffer()}.
 * @throws OpusDecoderException if the decode thread has previously failed.
 */
public void releaseOutputBuffer(OutputBuffer outputBuffer) throws OpusDecoderException {
  synchronized (lock) {
    maybeThrowDecoderError();
    outputBuffer.reset();
    availableOutputBuffers[availableOutputBufferCount++] = outputBuffer;
    maybeNotifyDecodeLoop();
  }
}
+
+ public void flush() {
+ synchronized (lock) {
+ flushDecodedOutputBuffer = true;
+ while (!queuedInputBuffers.isEmpty()) {
+ availableInputBuffers[availableInputBufferCount++] = queuedInputBuffers.removeFirst();
+ }
+ while (!queuedOutputBuffers.isEmpty()) {
+ availableOutputBuffers[availableOutputBufferCount++] = queuedOutputBuffers.removeFirst();
+ }
+ }
+ }
+
/**
 * Signals the decode loop to exit and blocks until it has terminated. After this returns no
 * further buffers should be queued or dequeued.
 */
public void release() {
  synchronized (lock) {
    released = true;
    // Wake the decode loop so it observes the released flag and exits.
    lock.notify();
  }
  try {
    // Wait for run() to return.
    join();
  } catch (InterruptedException e) {
    // Restore the interrupt status so callers can still observe the interruption.
    Thread.currentThread().interrupt();
  }
}
+
/**
 * Rethrows, on the caller's thread, any exception previously raised by the decode loop.
 *
 * Should only be called whilst synchronized on the lock object.
 */
private void maybeThrowDecoderError() throws OpusDecoderException {
  if (decoderException != null) {
    throw decoderException;
  }
}
+
+ /**
+ * Notifies the decode loop if there exists a queued input buffer and an available output buffer
+ * to decode into.
+ *
+ * Should only be called whilst synchronized on the lock object.
+ */
+ private void maybeNotifyDecodeLoop() {
+ if (!queuedInputBuffers.isEmpty() && availableOutputBufferCount > 0) {
+ lock.notify();
+ }
+ }
+
+ @Override
+ public void run() {
+ OpusDecoder decoder = null;
+ try {
+ decoder = new OpusDecoder(opusHeader);
+ while (decodeBuffer(decoder)) {
+ // Do nothing.
+ }
+ } catch (OpusDecoderException e) {
+ synchronized (lock) {
+ decoderException = e;
+ }
+ } catch (InterruptedException e) {
+ // Shouldn't ever happen.
+ } finally {
+ if (decoder != null) {
+ decoder.close();
+ }
+ }
+ }
+
/**
 * Decodes one queued input buffer into an available output buffer.
 *
 * @param decoder The native decoder to decode with.
 * @return True if the loop should continue; false if {@link #release()} was requested.
 * @throws InterruptedException if interrupted whilst waiting for buffers.
 * @throws OpusDecoderException if decoding fails.
 */
private boolean decodeBuffer(OpusDecoder decoder) throws InterruptedException,
    OpusDecoderException {
  InputBuffer inputBuffer;
  OutputBuffer outputBuffer;

  // Wait until we have an input buffer to decode, and an output buffer to decode into.
  synchronized (lock) {
    while (!released && (queuedInputBuffers.isEmpty() || availableOutputBufferCount == 0)) {
      lock.wait();
    }
    if (released) {
      return false;
    }
    inputBuffer = queuedInputBuffers.removeFirst();
    outputBuffer = availableOutputBuffers[--availableOutputBufferCount];
    // Clear the flush flag now; if flush() runs while we decode below, it will set it again
    // and the decoded output will be discarded in the second synchronized block.
    flushDecodedOutputBuffer = false;
  }

  // Decode.
  if (inputBuffer.getFlag(FLAG_END_OF_STREAM)) {
    // Propagate end of stream to the consumer without invoking the decoder.
    // NOTE(review): outputBuffer.size stays 0 here, so the "size == 0" check below recycles
    // this buffer instead of queuing it — confirm EOS is still observed by the renderer.
    outputBuffer.setFlag(FLAG_END_OF_STREAM);
  } else {
    if (inputBuffer.getFlag(FLAG_RESET_DECODER)) {
      decoder.reset();
      // When seeking to 0, skip number of samples as specified in opus header. When seeking to
      // any other time, skip number of samples as specified by seek preroll.
      skipSamples = (inputBuffer.sampleHolder.timeUs == 0) ? opusHeader.skipSamples : seekPreRoll;
    }
    SampleHolder sampleHolder = inputBuffer.sampleHolder;
    // Rewind the buffer to the start of the sample data before handing it to the decoder.
    sampleHolder.data.position(sampleHolder.data.position() - sampleHolder.size);
    outputBuffer.timestampUs = sampleHolder.timeUs;
    outputBuffer.size = decoder.decode(sampleHolder.data, sampleHolder.size,
        outputBuffer.data, outputBuffer.data.capacity());
    outputBuffer.data.position(0);
    // Trim any samples that must be discarded after a reset/seek (codec delay or seek pre-roll),
    // assuming 16-bit interleaved PCM output (2 bytes per sample per channel).
    if (skipSamples > 0) {
      int bytesPerSample = opusHeader.channelCount * 2;
      int skipBytes = skipSamples * bytesPerSample;
      if (outputBuffer.size < skipBytes) {
        skipSamples -= outputBuffer.size / bytesPerSample;
        outputBuffer.size = 0;
      } else {
        skipSamples = 0;
        outputBuffer.data.position(skipBytes);
        outputBuffer.size -= skipBytes;
      }
    }
  }

  synchronized (lock) {
    if (flushDecodedOutputBuffer
        || inputBuffer.sampleHolder.isDecodeOnly()
        || outputBuffer.size == 0) {
      // In the following cases, we make the output buffer available again rather than queuing it
      // to be consumed:
      // 1) A flush occured whilst we were decoding.
      // 2) The input sample has decodeOnly flag set.
      // 3) We skip the entire buffer due to skipSamples being greater than bytes decoded.
      outputBuffer.reset();
      availableOutputBuffers[availableOutputBufferCount++] = outputBuffer;
    } else {
      // Queue the decoded output buffer to be consumed.
      queuedOutputBuffers.addLast(outputBuffer);
    }
    // Make the input buffer available again.
    availableInputBuffers[availableInputBufferCount++] = inputBuffer;
  }

  return true;
}
+
+ private OpusHeader parseOpusHeader(byte[] headerBytes) throws OpusDecoderException {
+ final int maxChannelCount = 8;
+ final int maxChannelCountWithDefaultLayout = 2;
+ final int headerSize = 19;
+ final int headerChannelCountOffset = 9;
+ final int headerSkipSamplesOffset = 10;
+ final int headerGainOffset = 16;
+ final int headerChannelMappingOffset = 18;
+ final int headerNumStreamsOffset = headerSize;
+ final int headerNumCoupledOffset = headerNumStreamsOffset + 1;
+ final int headerStreamMapOffset = headerNumStreamsOffset + 2;
+ OpusHeader opusHeader = new OpusHeader();
+ try {
+ // Opus streams are always decoded at 48000 hz.
+ opusHeader.sampleRate = 48000;
+ opusHeader.channelCount = headerBytes[headerChannelCountOffset];
+ if (opusHeader.channelCount > maxChannelCount) {
+ throw new OpusDecoderException("Invalid channel count: " + opusHeader.channelCount);
+ }
+ opusHeader.skipSamples = readLittleEndian16(headerBytes, headerSkipSamplesOffset);
+ opusHeader.gain = readLittleEndian16(headerBytes, headerGainOffset);
+ opusHeader.channelMapping = headerBytes[headerChannelMappingOffset];
+
+ if (opusHeader.channelMapping == 0) {
+ // If there is no channel mapping, use the defaults.
+ if (opusHeader.channelCount > maxChannelCountWithDefaultLayout) {
+ throw new OpusDecoderException("Invalid Header, missing stream map.");
+ }
+ opusHeader.numStreams = 1;
+ opusHeader.numCoupled = (opusHeader.channelCount > 1) ? 1 : 0;
+ opusHeader.streamMap[0] = 0;
+ opusHeader.streamMap[1] = 1;
+ } else {
+ // Read the channel mapping.
+ opusHeader.numStreams = headerBytes[headerNumStreamsOffset];
+ opusHeader.numCoupled = headerBytes[headerNumCoupledOffset];
+ for (int i = 0; i < opusHeader.channelCount; i++) {
+ opusHeader.streamMap[i] = headerBytes[headerStreamMapOffset + i];
+ }
+ }
+ return opusHeader;
+ } catch (ArrayIndexOutOfBoundsException e) {
+ throw new OpusDecoderException("Header size is too small.");
+ }
+ }
+
+ private int readLittleEndian16(byte[] input, int offset) {
+ int value = input[offset];
+ value |= input[offset + 1] << 8;
+ return value;
+ }
+
/**
 * Converts a duration in nanoseconds to a sample count at the decoder's output sample rate.
 * Callers must not pass negative durations (e.g. -1 would yield a negative sample count).
 */
private int nsToSamples(long ns) {
  return (int) (ns * opusHeader.sampleRate / 1000000000);
}
+
/**
 * Holds a sample to be decoded, backed by a direct ByteBuffer so the data can be passed to the
 * native decoder without copying.
 */
/* package */ static final class InputBuffer {

  public final SampleHolder sampleHolder;

  // Bitmask of FLAG_* values (e.g. end-of-stream, reset-decoder).
  public int flags;

  public InputBuffer() {
    sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DIRECT);
    sampleHolder.data = ByteBuffer.allocateDirect(INPUT_BUFFER_SIZE);
  }

  /** Clears the buffer and flags so the instance can be reused. */
  public void reset() {
    sampleHolder.data.clear();
    flags = 0;
  }

  /** Sets the given flag bit(s). */
  public void setFlag(int flag) {
    flags |= flag;
  }

  /** Returns whether all bits of the given flag are set. */
  public boolean getFlag(int flag) {
    return (flags & flag) == flag;
  }

}
+
/**
 * Holds decoded PCM output, backed by a direct ByteBuffer written by the native decoder.
 */
/* package */ static final class OutputBuffer {

  public ByteBuffer data;
  // Number of valid bytes in data, starting at data's current position.
  public int size;
  // Presentation timestamp of the decoded audio, in microseconds.
  public long timestampUs;
  // Bitmask of FLAG_* values (e.g. end-of-stream).
  public int flags;

  public OutputBuffer() {
    data = ByteBuffer.allocateDirect(OUTPUT_BUFFER_SIZE);
  }

  /** Clears the buffer, size and flags so the instance can be reused. */
  public void reset() {
    data.clear();
    size = 0;
    flags = 0;
  }

  /** Sets the given flag bit(s). */
  public void setFlag(int flag) {
    flags |= flag;
  }

  /** Returns whether all bits of the given flag are set. */
  public boolean getFlag(int flag) {
    return (flags & flag) == flag;
  }

}
+
+ /* package */ static final class OpusHeader {
+
+ public int sampleRate;
+ public int channelCount;
+ public int skipSamples;
+ public int gain;
+ public int channelMapping;
+ public int numStreams;
+ public int numCoupled;
+ public byte[] streamMap;
+
+ public OpusHeader() {
+ streamMap = new byte[8];
+ }
+
+ }
+
+}
diff --git a/extensions/opus/src/main/jni/Android.mk b/extensions/opus/src/main/jni/Android.mk
new file mode 100644
index 0000000000..7ca8300316
--- /dev/null
+++ b/extensions/opus/src/main/jni/Android.mk
@@ -0,0 +1,34 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+WORKING_DIR := $(call my-dir)
+include $(CLEAR_VARS)
+APP_PLATFORM := android-10
+
+# build libopus.so
+LOCAL_PATH := $(WORKING_DIR)
+include libopus.mk
+
+# build libopusJNI.so
+include $(CLEAR_VARS)
+LOCAL_PATH := $(WORKING_DIR)
+LOCAL_MODULE := libopusJNI
+LOCAL_ARM_MODE := arm
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := opus_jni.cc
+LOCAL_LDLIBS := -llog -lz -lm
+LOCAL_SHARED_LIBRARIES := libopus
+include $(BUILD_SHARED_LIBRARY)
diff --git a/extensions/opus/src/main/jni/Application.mk b/extensions/opus/src/main/jni/Application.mk
new file mode 100644
index 0000000000..6563af0f50
--- /dev/null
+++ b/extensions/opus/src/main/jni/Application.mk
@@ -0,0 +1,19 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+APP_OPTIM := release
+APP_STL := gnustl_static
+APP_CPPFLAGS := -frtti
diff --git a/extensions/opus/src/main/jni/convert_android_asm.sh b/extensions/opus/src/main/jni/convert_android_asm.sh
new file mode 100755
index 0000000000..6d75f094ae
--- /dev/null
+++ b/extensions/opus/src/main/jni/convert_android_asm.sh
@@ -0,0 +1,47 @@
#!/bin/bash
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Converts libopus' ARM assembly sources to the GNU assembler syntax the NDK
# understands, renaming each converted file from *.s to *_gnu.s.
set -e
ASM_CONVERTER="./libopus/celt/arm/arm2gnu.pl"

if [[ ! -x "${ASM_CONVERTER}" ]]; then
  echo "Please make sure you have checked out libopus."
  exit
fi

while read file; do
  # This check is required because the ASM conversion script doesn't seem to be
  # idempotent.
  if [[ ! "${file}" =~ .*_gnu\.s$ ]]; then
    gnu_file="${file%.s}_gnu.s"
    ${ASM_CONVERTER} "${file}" > "${gnu_file}"
    # The ASM conversion script replaces includes with *_gnu.S. So, replace
    # occurrences of "*-gnu.S" with "*_gnu.s".
    sed -i "s/-gnu\.S/_gnu\.s/g" "${gnu_file}"
    rm -f "${file}"
  fi
done < <(find . -iname '*.s')

# Generate armopts.s from armopts.s.in, enabling the EDSP/MEDIA/NEON code paths.
sed \
  -e "s/@OPUS_ARM_MAY_HAVE_EDSP@/1/g" \
  -e "s/@OPUS_ARM_MAY_HAVE_MEDIA@/1/g" \
  -e "s/@OPUS_ARM_MAY_HAVE_NEON@/1/g" \
  libopus/celt/arm/armopts.s.in > libopus/celt/arm/armopts.s.temp
${ASM_CONVERTER} "libopus/celt/arm/armopts.s.temp" > "libopus/celt/arm/armopts_gnu.s"
rm "libopus/celt/arm/armopts.s.temp"
echo "Converted all ASM files and generated armopts.s successfully."
diff --git a/extensions/opus/src/main/jni/libopus.mk b/extensions/opus/src/main/jni/libopus.mk
new file mode 100644
index 0000000000..2eb5476e66
--- /dev/null
+++ b/extensions/opus/src/main/jni/libopus.mk
@@ -0,0 +1,50 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH := $(call my-dir)/libopus
+
+include $(CLEAR_VARS)
+
+include $(LOCAL_PATH)/celt_headers.mk
+include $(LOCAL_PATH)/celt_sources.mk
+include $(LOCAL_PATH)/opus_headers.mk
+include $(LOCAL_PATH)/opus_sources.mk
+include $(LOCAL_PATH)/silk_headers.mk
+include $(LOCAL_PATH)/silk_sources.mk
+
+LOCAL_MODULE := libopus
+LOCAL_ARM_MODE := arm
+LOCAL_CFLAGS := -DOPUS_BUILD -DFIXED_POINT -DUSE_ALLOCA -DHAVE_LRINT \
+ -DHAVE_LRINTF
+LOCAL_C_INCLUDES := $(LOCAL_PATH)/include $(LOCAL_PATH)/src \
+ $(LOCAL_PATH)/silk $(LOCAL_PATH)/celt \
+ $(LOCAL_PATH)/silk/fixed
+LOCAL_SRC_FILES := $(CELT_SOURCES) $(OPUS_SOURCES) $(OPUS_SOURCES_FLOAT) \
+ $(SILK_SOURCES) $(SILK_SOURCES_FIXED)
+
+ifneq ($(findstring armeabi-v7a, $(TARGET_ARCH_ABI)),)
+LOCAL_SRC_FILES += $(CELT_SOURCES_ARM)
+LOCAL_SRC_FILES += celt/arm/armopts_gnu.s.neon
+LOCAL_SRC_FILES += $(subst .s,_gnu.s.neon,$(CELT_SOURCES_ARM_ASM))
+LOCAL_CFLAGS += -DOPUS_ARM_ASM -DOPUS_ARM_INLINE_ASM -DOPUS_ARM_INLINE_EDSP \
+ -DOPUS_ARM_INLINE_MEDIA -DOPUS_ARM_INLINE_NEON \
+ -DOPUS_ARM_MAY_HAVE_NEON -DOPUS_ARM_MAY_HAVE_MEDIA \
+ -DOPUS_ARM_MAY_HAVE_EDSP
+endif
+
+LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/extensions/opus/src/main/jni/opus_jni.cc b/extensions/opus/src/main/jni/opus_jni.cc
new file mode 100644
index 0000000000..0259592c94
--- /dev/null
+++ b/extensions/opus/src/main/jni/opus_jni.cc
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
#include <jni.h>

#include <android/log.h>

#include <stdint.h>
+
+#include "opus.h" // NOLINT
+#include "opus_multistream.h" // NOLINT
+
// Logging helpers: route error messages to the Android log under the tag below.
#define LOG_TAG "libopus_native"
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, \
                   __VA_ARGS__))

// FUNC declares and defines a JNI entry point named
// Java_com_google_android_exoplayer_ext_opus_OpusDecoder_<NAME>, forwarding the
// standard (JNIEnv*, jobject) parameters plus any extra arguments.
#define FUNC(RETURN_TYPE, NAME, ...) \
  extern "C" { \
  JNIEXPORT RETURN_TYPE \
    Java_com_google_android_exoplayer_ext_opus_OpusDecoder_ ## NAME \
      (JNIEnv* env, jobject thiz, ##__VA_ARGS__);\
  } \
  JNIEXPORT RETURN_TYPE \
    Java_com_google_android_exoplayer_ext_opus_OpusDecoder_ ## NAME \
      (JNIEnv* env, jobject thiz, ##__VA_ARGS__)\
+
+jint JNI_OnLoad(JavaVM* vm, void* reserved) {
+ JNIEnv* env;
+ if (vm->GetEnv(reinterpret_cast(&env), JNI_VERSION_1_6) != JNI_OK) {
+ return -1;
+ }
+ return JNI_VERSION_1_6;
+}
+
+static int channelCount;
+
+FUNC(jlong, opusInit, jint sampleRate, jint channelCount, jint numStreams,
+ jint numCoupled, jint gain, jbyteArray jStreamMap) {
+ int status = OPUS_INVALID_STATE;
+ ::channelCount = channelCount;
+ jbyte* streamMapBytes = env->GetByteArrayElements(jStreamMap, 0);
+ uint8_t* streamMap = reinterpret_cast(streamMapBytes);
+ OpusMSDecoder* decoder = opus_multistream_decoder_create(
+ sampleRate, channelCount, numStreams, numCoupled, streamMap, &status);
+ env->ReleaseByteArrayElements(jStreamMap, streamMapBytes, 0);
+ if (!decoder || status != OPUS_OK) {
+ LOGE("Failed to create Opus Decoder; status=%s", opus_strerror(status));
+ return 0;
+ }
+ status = opus_multistream_decoder_ctl(decoder, OPUS_SET_GAIN(gain));
+ if (status != OPUS_OK) {
+ LOGE("Failed to set Opus header gain; status=%s", opus_strerror(status));
+ return 0;
+ }
+ return reinterpret_cast(decoder);
+}
+
+FUNC(jint, opusDecode, jlong jDecoder, jobject jInputBuffer, jint inputSize,
+ jobject jOutputBuffer, jint outputSize) {
+ OpusMSDecoder* decoder = reinterpret_cast(jDecoder);
+ const uint8_t* inputBuffer =
+ reinterpret_cast(
+ env->GetDirectBufferAddress(jInputBuffer));
+ int16_t* outputBuffer = reinterpret_cast(
+ env->GetDirectBufferAddress(jOutputBuffer));
+ int numFrames = opus_multistream_decode(decoder, inputBuffer, inputSize,
+ outputBuffer, outputSize, 0);
+ return (numFrames < 0) ? numFrames : numFrames * 2 * channelCount;
+}
+
+FUNC(void, opusClose, jlong jDecoder) {
+ OpusMSDecoder* decoder = reinterpret_cast(jDecoder);
+ opus_multistream_decoder_destroy(decoder);
+}
+
+FUNC(void, opusReset, jlong jDecoder) {
+ OpusMSDecoder* decoder = reinterpret_cast(jDecoder);
+ opus_multistream_decoder_ctl(decoder, OPUS_RESET_STATE);
+}
+
// Maps a libopus error code to its human-readable description as a Java String.
FUNC(jstring, opusGetErrorMessage, jint errorCode) {
  return env->NewStringUTF(opus_strerror(errorCode));
}
diff --git a/extensions/opus/src/main/project.properties b/extensions/opus/src/main/project.properties
new file mode 100644
index 0000000000..2ed62fbfcf
--- /dev/null
+++ b/extensions/opus/src/main/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-22
+android.library=true
+android.library.reference.1=../../../../library/src/main
diff --git a/extensions/opus/src/main/res/.README.txt b/extensions/opus/src/main/res/.README.txt
new file mode 100644
index 0000000000..c27147ce56
--- /dev/null
+++ b/extensions/opus/src/main/res/.README.txt
@@ -0,0 +1,2 @@
+This file is needed to make sure the res directory is present.
+The file is ignored by the Android toolchain because its name starts with a dot.
diff --git a/extensions/vp9/README.md b/extensions/vp9/README.md
new file mode 100644
index 0000000000..da5d0d2a50
--- /dev/null
+++ b/extensions/vp9/README.md
@@ -0,0 +1,134 @@
+# ExoPlayer VP9 Extension #
+
+## Description ##
+
+The VP9 Extension is a [Track Renderer][] implementation that helps you bundle libvpx (the VP9 decoding library) into your app and use it along with ExoPlayer to play VP9 video on Android devices.
+
+[Track Renderer]: http://google.github.io/ExoPlayer/doc/reference/com/google/android/exoplayer/TrackRenderer.html
+
+## Build Instructions (Android Studio and Eclipse) ##
+
+Building the VP9 Extension involves building libvpx and JNI bindings using the Android NDK and linking it into your app. The following steps will tell you how to do that using Android Studio or Eclipse.
+
+* Checkout ExoPlayer along with Extensions
+
+```
+git clone https://github.com/google/ExoPlayer.git
+```
+
+* Set the following environment variables:
+
+```
cd "<path to ExoPlayer checkout>"
+EXOPLAYER_ROOT="$(pwd)"
+VP9_EXT_PATH="${EXOPLAYER_ROOT}/extensions/vp9/src/main"
+```
+
+* Download the [Android NDK][] and set its location in an environment variable:
+
+```
NDK_PATH="<path to Android NDK>"
+```
+
+* Fetch libvpx and libyuv
+
+```
+cd "${VP9_EXT_PATH}/jni" && \
+git clone https://chromium.googlesource.com/webm/libvpx libvpx && \
+git clone http://git.chromium.org/external/libyuv.git libyuv
+```
+
+* Run a script that generates necessary configuration files for libvpx
+
+```
+cd ${VP9_EXT_PATH}/jni && \
+./generate_libvpx_android_configs.sh "${NDK_PATH}"
+```
+
+### Android Studio ###
+
+For Android Studio, we build the native libraries from the command line and then Gradle will pick it up when building your app using Android Studio.
+
+* Build the JNI native libraries
+
+```
+cd "${VP9_EXT_PATH}"/jni && \
+${NDK_PATH}/ndk-build APP_ABI=all -j4
+```
+
* In your project, you can add a dependency to the VP9 Extension by using the following rules
+
+```
+// in settings.gradle
+include ':..:ExoPlayer:library'
+include ':..:ExoPlayer:vp9-extension'
+
+// in build.gradle
+dependencies {
+ compile project(':..:ExoPlayer:library')
+ compile project(':..:ExoPlayer:vp9-extension')
+}
+```
+
+* Now, when you build your app, the VP9 extension will be built and the native libraries will be packaged along with the APK.
+
+### Eclipse ###
+
+* The following steps assume that you have installed Eclipse and configured it with the [Android SDK][] and [Android NDK ][]:
+ * Navigate to File->Import->General->Existing Projects into Workspace
+ * Select the root directory of the repository
+ * Import the following projects:
+ * ExoPlayerLib
+ * ExoPlayerExt-VP9
+ * If you are able to build ExoPlayerExt-VP9 project, then you're all set.
+ * (Optional) To speed up the NDK build:
+ * Right click on ExoPlayerExt-VP9 in the Project Explorer pane and choose Properties
+ * Click on C/C++ Build
+ * Uncheck `Use default build command`
+ * In `Build Command` enter: `ndk-build -j4` (adjust 4 to a reasonable number depending on the number of cores in your computer)
+ * Click Apply
+
You can now create your own Android App project and add ExoPlayerLib along with ExoPlayerExt-VP9 as dependencies to use ExoPlayer along with the VP9 Extension.
+
+
+[Android NDK]: https://developer.android.com/tools/sdk/ndk/index.html
+
+[Android NDK ]: http://tools.android.com/recent/usingthendkplugin
+[Android SDK]: http://developer.android.com/sdk/installing/index.html?pkg=tools
+
+## Building for various Architectures ##
+
+### Android Studio ###
+
The manual invocation of `ndk-build` will build the library for all architectures, and the correct one will be picked up from the APK based on the device it's running on.
+
+### Eclipse ###
+
+libvpx is optimized for various architectures (like neon, x86, etc.). The `generate_libvpx_android_configs.sh` script generates Android configurations for the following architectures:
+
+* armeabi (the default - does not include neon optimizations)
+* armeabi-v7a (choose this to enable neon optimizations)
+* mips
+* x86
+* all (will result in a larger binary but will cover all architectures)
+
+You can build for a specific architecture in two ways:
+
+* Method 1 (edit `Application.mk`)
 * Edit `${VP9_EXT_PATH}/jni/Application.mk` and add the following line: `APP_ABI := <arch>` (where `<arch>` is one of the architectures listed above)
+* Method 2 (pass NDK build flag)
+ * Right click on ExoPlayerExt-VP9 in the Project Explorer pane and choose Properties
+ * Click on C/C++ Build
+ * Uncheck `Use default build command`
 * In `Build Command` enter: `ndk-build APP_ABI=<arch>` (where `<arch>` is one of the architectures listed above)
+ * Click Apply
+
+## Other Things to Note ##
+
+* Every time there is a change to the libvpx checkout:
+ * Android config scripts should be re-generated by running `generate_libvpx_android_configs.sh`
+ * Clean and re-build the project.
+* If you want to use your own version of libvpx or libyuv, place it in `${VP9_EXT_PATH}/jni/libvpx` or `${VP9_EXT_PATH}/jni/libyuv` respectively.
+
diff --git a/extensions/vp9/build.gradle b/extensions/vp9/build.gradle
new file mode 100644
index 0000000000..443d0d6601
--- /dev/null
+++ b/extensions/vp9/build.gradle
@@ -0,0 +1,45 @@
+// Copyright (C) 2014 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+apply plugin: 'com.android.library'
+
+android {
+ compileSdkVersion 22
+ buildToolsVersion "22.0.1"
+
+ defaultConfig {
+ minSdkVersion 9
+ targetSdkVersion 22
+ }
+
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
+ }
+ }
+
+ lintOptions {
+ abortOnError false
+ }
+
+ sourceSets.main {
+ jniLibs.srcDir 'src/main/libs'
+ jni.srcDirs = [] // Disable the automatic ndk-build call by Android Studio.
+ }
+}
+
+dependencies {
+ compile project(':library')
+}
+
diff --git a/extensions/vp9/src/main/.classpath b/extensions/vp9/src/main/.classpath
new file mode 100644
index 0000000000..fd895b0917
--- /dev/null
+++ b/extensions/vp9/src/main/.classpath
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/extensions/vp9/src/main/.cproject b/extensions/vp9/src/main/.cproject
new file mode 100644
index 0000000000..8548e48eb9
--- /dev/null
+++ b/extensions/vp9/src/main/.cproject
@@ -0,0 +1,57 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/extensions/vp9/src/main/.project b/extensions/vp9/src/main/.project
new file mode 100644
index 0000000000..3811626cb3
--- /dev/null
+++ b/extensions/vp9/src/main/.project
@@ -0,0 +1,97 @@
+
+
+ ExoPlayerExt-VP9
+
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.genmakebuilder
+ clean,full,incremental,
+
+
+ ?children?
+ ?name?=outputEntries\|?children?=?name?=entry\\\\\\\|\\\|?name?=entry\\\\\\\|\\\|\||
+
+
+ ?name?
+
+
+
+ org.eclipse.cdt.make.core.append_environment
+ true
+
+
+ org.eclipse.cdt.make.core.buildArguments
+
+
+
+ org.eclipse.cdt.make.core.buildCommand
+ ndk-build
+
+
+ org.eclipse.cdt.make.core.cleanBuildTarget
+ clean
+
+
+ org.eclipse.cdt.make.core.contents
+ org.eclipse.cdt.make.core.activeConfigSettings
+
+
+ org.eclipse.cdt.make.core.enableAutoBuild
+ false
+
+
+ org.eclipse.cdt.make.core.enableCleanBuild
+ true
+
+
+ org.eclipse.cdt.make.core.enableFullBuild
+ true
+
+
+ org.eclipse.cdt.make.core.stopOnError
+ true
+
+
+ org.eclipse.cdt.make.core.useDefaultBuildCmd
+ true
+
+
+
+
+ com.android.ide.eclipse.adt.ResourceManagerBuilder
+
+
+
+
+ com.android.ide.eclipse.adt.PreCompilerBuilder
+
+
+
+
+ org.eclipse.jdt.core.javabuilder
+
+
+
+
+ com.android.ide.eclipse.adt.ApkBuilder
+
+
+
+
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder
+ full,incremental,
+
+
+
+
+
+ com.android.ide.eclipse.adt.AndroidNature
+ org.eclipse.jdt.core.javanature
+ org.eclipse.cdt.core.cnature
+ org.eclipse.cdt.core.ccnature
+ org.eclipse.cdt.managedbuilder.core.managedBuildNature
+ org.eclipse.cdt.managedbuilder.core.ScannerConfigNature
+
+
diff --git a/extensions/vp9/src/main/.settings/org.eclipse.jdt.core.prefs b/extensions/vp9/src/main/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000000..d17b6724d1
--- /dev/null
+++ b/extensions/vp9/src/main/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,12 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.methodParameters=do not generate
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
+org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
+org.eclipse.jdt.core.compiler.compliance=1.7
+org.eclipse.jdt.core.compiler.debug.lineNumber=generate
+org.eclipse.jdt.core.compiler.debug.localVariable=generate
+org.eclipse.jdt.core.compiler.debug.sourceFile=generate
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.7
diff --git a/extensions/vp9/src/main/AndroidManifest.xml b/extensions/vp9/src/main/AndroidManifest.xml
new file mode 100644
index 0000000000..3e21f70510
--- /dev/null
+++ b/extensions/vp9/src/main/AndroidManifest.xml
@@ -0,0 +1,23 @@
+
+
+
+
+
+
+
+
+
diff --git a/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/LibvpxVideoTrackRenderer.java b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/LibvpxVideoTrackRenderer.java
new file mode 100644
index 0000000000..0692e7ed47
--- /dev/null
+++ b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/LibvpxVideoTrackRenderer.java
@@ -0,0 +1,505 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer.ext.vp9;
+
+import com.google.android.exoplayer.ExoPlaybackException;
+import com.google.android.exoplayer.ExoPlayer;
+import com.google.android.exoplayer.MediaFormat;
+import com.google.android.exoplayer.MediaFormatHolder;
+import com.google.android.exoplayer.SampleSource;
+import com.google.android.exoplayer.TrackRenderer;
+import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.InputBuffer;
+import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
+import com.google.android.exoplayer.util.MimeTypes;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.os.Handler;
+import android.os.SystemClock;
+import android.view.Surface;
+
+import java.io.IOException;
+
+/**
+ * Decodes and renders video using the native VP9 decoder.
+ */
+public class LibvpxVideoTrackRenderer extends TrackRenderer {
+
+ /**
+ * Interface definition for a callback to be notified of {@link LibvpxVideoTrackRenderer} events.
+ */
+ public interface EventListener {
+
+ /**
+ * Invoked to report the number of frames dropped by the renderer. Dropped frames are reported
+ * whenever the renderer is stopped having dropped frames, and optionally, whenever the count
+ * reaches a specified threshold whilst the renderer is started.
+ *
+ * @param count The number of dropped frames.
+ * @param elapsed The duration in milliseconds over which the frames were dropped. This
+ * duration is timed from when the renderer was started or from when dropped frames were
+ * last reported (whichever was more recent), and not from when the first of the reported
+ * drops occurred.
+ */
+ void onDroppedFrames(int count, long elapsed);
+
+ /**
+ * Invoked each time there's a change in the size of the video being rendered.
+ *
+ * @param width The video width in pixels.
+ * @param height The video height in pixels.
+ */
+ void onVideoSizeChanged(int width, int height);
+
+ /**
+ * Invoked when a frame is rendered to a surface for the first time following that surface
+ * having been set as the target for the renderer.
+ *
+ * @param surface The surface to which a first frame has been rendered.
+ */
+ void onDrawnToSurface(Surface surface);
+
+ /**
+ * Invoked when one of the following happens: libvpx initialization failure, decoder error,
+ * renderer error.
+ *
+ * @param e The corresponding exception.
+ */
+ void onDecoderError(VpxDecoderException e);
+
+ }
+
+ /**
+ * The type of a message that can be passed to an instance of this class via
+ * {@link ExoPlayer#sendMessage} or {@link ExoPlayer#blockingSendMessage}. The message object
+ * should be the target {@link Surface}, or null.
+ */
+ public static final int MSG_SET_SURFACE = 1;
+ public static final int MSG_SET_VPX_SURFACE_VIEW = 2;
+
+ private final SampleSource source;
+ private final boolean scaleToFit;
+ private final Handler eventHandler;
+ private final EventListener eventListener;
+ private final int maxDroppedFrameCountToNotify;
+ private final MediaFormatHolder formatHolder;
+
+ private MediaFormat format;
+ private VpxDecoderWrapper decoder;
+ private InputBuffer inputBuffer;
+ private OutputBuffer outputBuffer;
+
+ private Bitmap bitmap;
+ private boolean drawnToSurface;
+ private boolean renderedFirstFrame;
+ private Surface surface;
+ private VpxVideoSurfaceView vpxVideoSurfaceView;
+ private boolean outputRgb;
+
+ private int trackIndex;
+ private long currentPositionUs;
+ private boolean inputStreamEnded;
+ private boolean outputStreamEnded;
+ private boolean sourceIsReady;
+ private int previousWidth;
+ private int previousHeight;
+
+ private int droppedFrameCount;
+ private long droppedFrameAccumulationStartTimeMs;
+
+ /**
+ * @param source The upstream source from which the renderer obtains samples.
+ * @param scaleToFit Boolean that indicates if video frames should be scaled to fit when
+ * rendering.
+ */
+ public LibvpxVideoTrackRenderer(SampleSource source, boolean scaleToFit) {
+ this(source, scaleToFit, null, null, 0);
+ }
+
+ /**
+ * @param source The upstream source from which the renderer obtains samples.
+ * @param scaleToFit Boolean that indicates if video frames should be scaled to fit when
+ * rendering.
+ * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
+ * null if delivery of events is not required.
+ * @param eventListener A listener of events. May be null if delivery of events is not required.
+ * @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
+ * invocations of {@link EventListener#onDroppedFrames(int, long)}.
+ */
+ public LibvpxVideoTrackRenderer(SampleSource source, boolean scaleToFit,
+ Handler eventHandler, EventListener eventListener, int maxDroppedFrameCountToNotify) {
+ this.source = source;
+ this.scaleToFit = scaleToFit;
+ this.eventHandler = eventHandler;
+ this.eventListener = eventListener;
+ this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
+ previousWidth = -1;
+ previousHeight = -1;
+ formatHolder = new MediaFormatHolder();
+ }
+
+ @Override
+ protected int doPrepare(long positionUs) throws ExoPlaybackException {
+ try {
+ boolean sourcePrepared = source.prepare(positionUs);
+ if (!sourcePrepared) {
+ return TrackRenderer.STATE_UNPREPARED;
+ }
+ } catch (IOException e) {
+ throw new ExoPlaybackException(e);
+ }
+
+ for (int i = 0; i < source.getTrackCount(); i++) {
+ if (source.getTrackInfo(i).mimeType.equalsIgnoreCase(MimeTypes.VIDEO_VP9)
+ || source.getTrackInfo(i).mimeType.equalsIgnoreCase(MimeTypes.VIDEO_WEBM)) {
+ trackIndex = i;
+ return TrackRenderer.STATE_PREPARED;
+ }
+ }
+
+ return TrackRenderer.STATE_IGNORE;
+ }
+
+ @Override
+ protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
+ if (outputStreamEnded) {
+ return;
+ }
+ try {
+ sourceIsReady = source.continueBuffering(positionUs);
+ checkForDiscontinuity();
+ if (format == null) {
+ readFormat();
+ } else {
+ // TODO: Add support for dynamic switching between one type of surface to another.
+ // Create the decoder.
+ if (decoder == null) {
+ decoder = new VpxDecoderWrapper(outputRgb);
+ decoder.start();
+ }
+ processOutputBuffer(positionUs, elapsedRealtimeUs);
+
+ // Queue input buffers.
+ while (feedInputBuffer()) {}
+ }
+ } catch (VpxDecoderException e) {
+ notifyDecoderError(e);
+ throw new ExoPlaybackException(e);
+ } catch (IOException e) {
+ throw new ExoPlaybackException(e);
+ }
+ }
+
+ private void processOutputBuffer(long positionUs, long elapsedRealtimeUs)
+ throws VpxDecoderException {
+ if (outputStreamEnded) {
+ return;
+ }
+
+ if (outputBuffer == null) {
+ outputBuffer = decoder.dequeueOutputBuffer();
+ if (outputBuffer == null) {
+ return;
+ }
+ }
+
+ if (outputBuffer.flags == VpxDecoderWrapper.FLAG_END_OF_STREAM) {
+ outputStreamEnded = true;
+ releaseOutputBuffer();
+ return;
+ }
+
+ long elapsedSinceStartOfLoop = SystemClock.elapsedRealtime() * 1000 - elapsedRealtimeUs;
+ long timeToRenderUs = outputBuffer.timestampUs - positionUs - elapsedSinceStartOfLoop;
+
+ if (timeToRenderUs < -30000 || outputBuffer.timestampUs < currentPositionUs) {
+ // Drop frame if we are too late.
+ droppedFrameCount++;
+ if (droppedFrameCount == maxDroppedFrameCountToNotify) {
+ notifyAndResetDroppedFrameCount();
+ }
+ releaseOutputBuffer();
+ return;
+ }
+
+    // If we have not rendered any frame so far (either initially or immediately following a
+    // seek), render one frame irrespective of the state.
+ if (!renderedFirstFrame) {
+ renderBuffer();
+ renderedFirstFrame = true;
+ return;
+ }
+
+ // Do nothing if we are not playing or if we are too early to render the next frame.
+ if (getState() != TrackRenderer.STATE_STARTED || timeToRenderUs > 30000) {
+ return;
+ }
+
+ if (timeToRenderUs > 11000) {
+ try {
+ // Subtracting 10000 rather than 11000 ensures that the sleep time
+ // will be at least 1ms.
+ Thread.sleep((timeToRenderUs - 10000) / 1000);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
+ }
+ renderBuffer();
+ }
+
+ private void renderBuffer() throws VpxDecoderException {
+ notifyIfVideoSizeChanged(outputBuffer);
+ if (outputRgb) {
+ renderRgbFrame(outputBuffer, scaleToFit);
+ } else {
+ vpxVideoSurfaceView.renderFrame(outputBuffer);
+ }
+ if (!drawnToSurface) {
+ drawnToSurface = true;
+ notifyDrawnToSurface(surface);
+ }
+ releaseOutputBuffer();
+ }
+
+ private void releaseOutputBuffer() throws VpxDecoderException {
+ currentPositionUs = outputBuffer.timestampUs;
+ decoder.releaseOutputBuffer(outputBuffer);
+ outputBuffer = null;
+ }
+
+ private void renderRgbFrame(OutputBuffer outputBuffer, boolean scale) {
+ if (bitmap == null || bitmap.getWidth() != outputBuffer.width
+ || bitmap.getHeight() != outputBuffer.height) {
+ bitmap = Bitmap.createBitmap(outputBuffer.width, outputBuffer.height, Bitmap.Config.RGB_565);
+ }
+ bitmap.copyPixelsFromBuffer(outputBuffer.data);
+ Canvas canvas = surface.lockCanvas(null);
+ if (scale) {
+ canvas.scale(((float) canvas.getWidth()) / outputBuffer.width,
+ ((float) canvas.getHeight()) / outputBuffer.height);
+ }
+ canvas.drawBitmap(bitmap, 0, 0, null);
+ surface.unlockCanvasAndPost(canvas);
+ }
+
+ private boolean feedInputBuffer() throws IOException, VpxDecoderException {
+ if (inputStreamEnded) {
+ return false;
+ }
+
+ if (inputBuffer == null) {
+ inputBuffer = decoder.getInputBuffer();
+ if (inputBuffer == null) {
+ return false;
+ }
+ }
+
+ int result = source.readData(trackIndex, currentPositionUs, formatHolder,
+ inputBuffer.sampleHolder, false);
+ if (result == SampleSource.NOTHING_READ) {
+ return false;
+ }
+ if (result == SampleSource.DISCONTINUITY_READ) {
+ flushDecoder();
+ return true;
+ }
+ if (result == SampleSource.FORMAT_READ) {
+ format = formatHolder.format;
+ return true;
+ }
+ if (result == SampleSource.END_OF_STREAM) {
+ inputBuffer.flags = VpxDecoderWrapper.FLAG_END_OF_STREAM;
+ decoder.queueInputBuffer(inputBuffer);
+ inputBuffer = null;
+ inputStreamEnded = true;
+ return false;
+ }
+
+ inputBuffer.width = format.width;
+ inputBuffer.height = format.height;
+ decoder.queueInputBuffer(inputBuffer);
+ inputBuffer = null;
+ return true;
+ }
+
+ private void checkForDiscontinuity() throws IOException {
+ if (decoder == null) {
+ return;
+ }
+ int result = source.readData(trackIndex, currentPositionUs, formatHolder, null, true);
+ if (result == SampleSource.DISCONTINUITY_READ) {
+ flushDecoder();
+ }
+ }
+
+ private void flushDecoder() {
+ inputBuffer = null;
+ outputBuffer = null;
+ decoder.flush();
+ }
+
+ @Override
+ protected boolean isEnded() {
+ return outputStreamEnded;
+ }
+
+ @Override
+ protected boolean isReady() {
+ return format != null && sourceIsReady;
+ }
+
+ @Override
+ protected long getDurationUs() {
+ return source.getTrackInfo(trackIndex).durationUs;
+ }
+
+ @Override
+ protected long getCurrentPositionUs() {
+ return currentPositionUs;
+ }
+
+ @Override
+ protected long getBufferedPositionUs() {
+ long sourceBufferedPosition = source.getBufferedPositionUs();
+ return sourceBufferedPosition == UNKNOWN_TIME_US || sourceBufferedPosition == END_OF_TRACK_US
+ ? sourceBufferedPosition : Math.max(sourceBufferedPosition, getCurrentPositionUs());
+ }
+
+ @Override
+ protected void seekTo(long positionUs) throws ExoPlaybackException {
+ currentPositionUs = positionUs;
+ source.seekToUs(positionUs);
+ inputStreamEnded = false;
+ outputStreamEnded = false;
+ renderedFirstFrame = false;
+ sourceIsReady = false;
+ }
+
+ @Override
+ protected void onEnabled(long positionUs, boolean joining) {
+ source.enable(trackIndex, positionUs);
+ sourceIsReady = false;
+ inputStreamEnded = false;
+ outputStreamEnded = false;
+ renderedFirstFrame = false;
+ currentPositionUs = positionUs;
+ }
+
+ @Override
+ protected void onStarted() {
+ droppedFrameCount = 0;
+ droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime();
+ }
+
+ @Override
+ protected void onStopped() {
+ notifyAndResetDroppedFrameCount();
+ }
+
+ @Override
+ protected void onReleased() {
+ source.release();
+ }
+
+ @Override
+ protected void onDisabled() {
+ if (decoder != null) {
+ decoder.release();
+ decoder = null;
+ }
+ inputBuffer = null;
+ outputBuffer = null;
+ format = null;
+ source.disable(trackIndex);
+ }
+
+ private void readFormat() throws IOException {
+ int result = source.readData(trackIndex, currentPositionUs, formatHolder, null, false);
+ if (result == SampleSource.FORMAT_READ) {
+ format = formatHolder.format;
+ }
+ }
+
+ @Override
+ public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
+ if (messageType == MSG_SET_SURFACE) {
+ surface = (Surface) message;
+ vpxVideoSurfaceView = null;
+ outputRgb = true;
+ } else if (messageType == MSG_SET_VPX_SURFACE_VIEW) {
+ vpxVideoSurfaceView = (VpxVideoSurfaceView) message;
+ surface = null;
+ outputRgb = false;
+ } else {
+ super.handleMessage(messageType, message);
+ }
+ }
+
+ private void notifyIfVideoSizeChanged(final OutputBuffer outputBuffer) {
+ if (previousWidth == -1 || previousHeight == -1
+ || previousWidth != outputBuffer.width || previousHeight != outputBuffer.height) {
+ previousWidth = outputBuffer.width;
+ previousHeight = outputBuffer.height;
+ if (eventHandler != null && eventListener != null) {
+ eventHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onVideoSizeChanged(outputBuffer.width, outputBuffer.height);
+ }
+ });
+ }
+ }
+ }
+
+ private void notifyAndResetDroppedFrameCount() {
+ if (eventHandler != null && eventListener != null && droppedFrameCount > 0) {
+ long now = SystemClock.elapsedRealtime();
+ final int countToNotify = droppedFrameCount;
+ final long elapsedToNotify = now - droppedFrameAccumulationStartTimeMs;
+ droppedFrameCount = 0;
+ droppedFrameAccumulationStartTimeMs = now;
+ eventHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onDroppedFrames(countToNotify, elapsedToNotify);
+ }
+ });
+ }
+ }
+
+ private void notifyDrawnToSurface(final Surface surface) {
+ if (eventHandler != null && eventListener != null) {
+ eventHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onDrawnToSurface(surface);
+ }
+ });
+ }
+ }
+
+ private void notifyDecoderError(final VpxDecoderException e) {
+ if (eventHandler != null && eventListener != null) {
+ eventHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onDecoderError(e);
+ }
+ });
+ }
+ }
+
+}
diff --git a/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxDecoder.java b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxDecoder.java
new file mode 100644
index 0000000000..3a65c9cf77
--- /dev/null
+++ b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxDecoder.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer.ext.vp9;
+
+import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
+
+import java.nio.ByteBuffer;
+
+/**
+ * JNI Wrapper for the libvpx VP9 decoder.
+ */
+/* package */ class VpxDecoder {
+
+ private final long vpxDecContext;
+
+ static {
+ System.loadLibrary("vpx");
+ System.loadLibrary("vpxJNI");
+ }
+
+ /**
+ * Creates the VP9 Decoder.
+ *
+ * @throws VpxDecoderException if the decoder fails to initialize.
+ */
+ public VpxDecoder() throws VpxDecoderException {
+ vpxDecContext = vpxInit();
+ if (vpxDecContext == 0) {
+ throw new VpxDecoderException("libvpx initialization error: failed to initialize decoder");
+ }
+ }
+
+ /**
+   * Decodes a VP9 encoded frame and, if requested, converts it to RGB565.
+ *
+ * @param encoded The encoded buffer.
+ * @param size Size of the encoded buffer.
+ * @param outputBuffer The buffer into which the decoded frame should be written.
+ * @param outputRgb True if the buffer should be converted to RGB color format. False if YUV
+ * format should be retained.
+ * @return 0 on success with a frame to render. 1 on success without a frame to render.
+ * @throws VpxDecoderException on decode failure.
+ */
+ public int decode(ByteBuffer encoded, int size, OutputBuffer outputBuffer, boolean outputRgb)
+ throws VpxDecoderException {
+ if (vpxDecode(vpxDecContext, encoded, size) != 0) {
+ throw new VpxDecoderException("libvpx decode error: " + vpxGetErrorMessage(vpxDecContext));
+ }
+ return vpxGetFrame(vpxDecContext, outputBuffer, outputRgb);
+ }
+
+ /**
+ * Destroys the decoder.
+ */
+ public void close() {
+ vpxClose(vpxDecContext);
+ }
+
+ private native long vpxInit();
+ private native long vpxClose(long context);
+ private native long vpxDecode(long context, ByteBuffer encoded, int length);
+ private native int vpxGetFrame(long context, OutputBuffer outputBuffer, boolean outputRgb);
+ private native String vpxGetErrorMessage(long context);
+
+}
diff --git a/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxDecoderException.java b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxDecoderException.java
new file mode 100644
index 0000000000..1afa01a6c7
--- /dev/null
+++ b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxDecoderException.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer.ext.vp9;
+
+/**
+ * Thrown when a libvpx decoder error occurs.
+ */
+public class VpxDecoderException extends Exception {
+
+ public VpxDecoderException(String message) {
+ super(message);
+ }
+
+}
diff --git a/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxDecoderWrapper.java b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxDecoderWrapper.java
new file mode 100644
index 0000000000..c6ce0247c5
--- /dev/null
+++ b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxDecoderWrapper.java
@@ -0,0 +1,298 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer.ext.vp9;
+
+import com.google.android.exoplayer.SampleHolder;
+
+import java.nio.ByteBuffer;
+import java.util.LinkedList;
+
+/**
+ * Wraps {@link VpxDecoder}, exposing a higher level decoder interface.
+ */
+/* package */ class VpxDecoderWrapper extends Thread {
+
+ public static final int FLAG_END_OF_STREAM = 1;
+
+ private static final int INPUT_BUFFER_SIZE = 768 * 1024; // Value based on cs/SoftVpx.cpp.
+ private static final int NUM_BUFFERS = 16;
+
+ private final Object lock;
+ private final boolean outputRgb;
+
+ private final LinkedList queuedInputBuffers;
+ private final LinkedList queuedOutputBuffers;
+ private final InputBuffer[] availableInputBuffers;
+ private final OutputBuffer[] availableOutputBuffers;
+ private int availableInputBufferCount;
+ private int availableOutputBufferCount;
+
+ private boolean flushDecodedOutputBuffer;
+ private boolean released;
+
+ private VpxDecoderException decoderException;
+
+ /**
+ * @param outputRgb True if the decoded output is in RGB color format. False if it is in YUV
+ * color format.
+ */
+ public VpxDecoderWrapper(boolean outputRgb) {
+ lock = new Object();
+ this.outputRgb = outputRgb;
+ queuedInputBuffers = new LinkedList<>();
+ queuedOutputBuffers = new LinkedList<>();
+ availableInputBuffers = new InputBuffer[NUM_BUFFERS];
+ availableOutputBuffers = new OutputBuffer[NUM_BUFFERS];
+ availableInputBufferCount = NUM_BUFFERS;
+ availableOutputBufferCount = NUM_BUFFERS;
+ for (int i = 0; i < NUM_BUFFERS; i++) {
+ availableInputBuffers[i] = new InputBuffer();
+ availableOutputBuffers[i] = new OutputBuffer();
+ }
+ }
+
+ public InputBuffer getInputBuffer() throws VpxDecoderException {
+ synchronized (lock) {
+ maybeThrowDecoderError();
+ if (availableInputBufferCount == 0) {
+ return null;
+ }
+ InputBuffer inputBuffer = availableInputBuffers[--availableInputBufferCount];
+ inputBuffer.flags = 0;
+ inputBuffer.sampleHolder.data.clear();
+ return inputBuffer;
+ }
+ }
+
+ public void queueInputBuffer(InputBuffer inputBuffer) throws VpxDecoderException {
+ synchronized (lock) {
+ maybeThrowDecoderError();
+ queuedInputBuffers.addLast(inputBuffer);
+ maybeNotifyDecodeLoop();
+ }
+ }
+
+ public OutputBuffer dequeueOutputBuffer() throws VpxDecoderException {
+ synchronized (lock) {
+ maybeThrowDecoderError();
+ if (queuedOutputBuffers.isEmpty()) {
+ return null;
+ }
+ return queuedOutputBuffers.removeFirst();
+ }
+ }
+
+ public void releaseOutputBuffer(OutputBuffer outputBuffer) throws VpxDecoderException {
+ synchronized (lock) {
+ maybeThrowDecoderError();
+ availableOutputBuffers[availableOutputBufferCount++] = outputBuffer;
+ maybeNotifyDecodeLoop();
+ }
+ }
+
+ public void flush() {
+ synchronized (lock) {
+ flushDecodedOutputBuffer = true;
+ while (!queuedInputBuffers.isEmpty()) {
+ availableInputBuffers[availableInputBufferCount++] = queuedInputBuffers.removeFirst();
+ }
+ while (!queuedOutputBuffers.isEmpty()) {
+ availableOutputBuffers[availableOutputBufferCount++] = queuedOutputBuffers.removeFirst();
+ }
+ }
+ }
+
+ public void release() {
+ synchronized (lock) {
+ released = true;
+ lock.notify();
+ }
+ try {
+ join();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ private void maybeThrowDecoderError() throws VpxDecoderException {
+ if (decoderException != null) {
+ throw decoderException;
+ }
+ }
+
+ /**
+ * Notifies the decode loop if there exists a queued input buffer and an available output buffer
+ * to decode into.
+ *
+ * Should only be called whilst synchronized on the lock object.
+ */
+ private void maybeNotifyDecodeLoop() {
+ if (!queuedInputBuffers.isEmpty() && availableOutputBufferCount > 0) {
+ lock.notify();
+ }
+ }
+
+ @Override
+ public void run() {
+ VpxDecoder decoder = null;
+ try {
+ decoder = new VpxDecoder();
+ while (decodeBuffer(decoder)) {
+ // Do nothing.
+ }
+ } catch (VpxDecoderException e) {
+ synchronized (lock) {
+ decoderException = e;
+ }
+ } catch (InterruptedException e) {
+ // Shouldn't ever happen.
+ } finally {
+ if (decoder != null) {
+ decoder.close();
+ }
+ }
+ }
+
+ private boolean decodeBuffer(VpxDecoder decoder) throws InterruptedException,
+ VpxDecoderException {
+ InputBuffer inputBuffer;
+ OutputBuffer outputBuffer;
+
+ // Wait until we have an input buffer to decode, and an output buffer to decode into.
+ synchronized (lock) {
+ while (!released && (queuedInputBuffers.isEmpty() || availableOutputBufferCount == 0)) {
+ lock.wait();
+ }
+ if (released) {
+ return false;
+ }
+ inputBuffer = queuedInputBuffers.removeFirst();
+ outputBuffer = availableOutputBuffers[--availableOutputBufferCount];
+ flushDecodedOutputBuffer = false;
+ }
+
+ // Decode.
+ int decodeResult = -1;
+ if (inputBuffer.flags == FLAG_END_OF_STREAM) {
+ outputBuffer.flags = FLAG_END_OF_STREAM;
+ } else {
+ SampleHolder sampleHolder = inputBuffer.sampleHolder;
+ outputBuffer.timestampUs = sampleHolder.timeUs;
+ sampleHolder.data.position(sampleHolder.data.position() - sampleHolder.size);
+ decodeResult = decoder.decode(sampleHolder.data, sampleHolder.size, outputBuffer, outputRgb);
+ }
+
+ synchronized (lock) {
+ if (flushDecodedOutputBuffer
+ || inputBuffer.sampleHolder.isDecodeOnly()
+ || decodeResult == 1) {
+ // In the following cases, we make the output buffer available again rather than queuing it
+ // to be consumed:
+      // 1) A flush occurred whilst we were decoding.
+ // 2) The input sample has decodeOnly flag set.
+ // 3) The decode succeeded, but we did not get any frame back for rendering (happens in case
+ // of an unpacked altref frame).
+ availableOutputBuffers[availableOutputBufferCount++] = outputBuffer;
+ } else {
+ // Queue the decoded output buffer to be consumed.
+ queuedOutputBuffers.addLast(outputBuffer);
+ }
+ // Make the input buffer available again.
+ availableInputBuffers[availableInputBufferCount++] = inputBuffer;
+ }
+
+ return true;
+ }
+
+ /* package */ static final class InputBuffer {
+
+ public final SampleHolder sampleHolder;
+
+ public int width;
+ public int height;
+ public int flags;
+
+ public InputBuffer() {
+ sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DIRECT);
+ sampleHolder.data = ByteBuffer.allocateDirect(INPUT_BUFFER_SIZE);
+ }
+
+ }
+
+ /* package */ static final class OutputBuffer {
+
+ public ByteBuffer data;
+ public long timestampUs;
+ public int width;
+ public int height;
+ public int flags;
+ public ByteBuffer[] yuvPlanes;
+ public int[] yuvStrides;
+
+ /**
+ * This method is called from C++ through JNI after decoding is done. It will resize the
+ * buffer based on the given dimensions.
+ */
+ public void initForRgbFrame(int width, int height) {
+ this.width = width;
+ this.height = height;
+ int minimumRgbSize = width * height * 2;
+ if (data == null || data.capacity() < minimumRgbSize) {
+ data = ByteBuffer.allocateDirect(minimumRgbSize);
+ yuvPlanes = null;
+ }
+ data.position(0);
+ data.limit(minimumRgbSize);
+ }
+
+ /**
+ * This method is called from C++ through JNI after decoding is done. It will resize the
+ * buffer based on the given stride.
+ */
+ public void initForYuvFrame(int width, int height, int yStride, int uvStride) {
+ this.width = width;
+ this.height = height;
+ int yLength = yStride * height;
+ int uvLength = uvStride * ((height + 1) / 2);
+ int minimumYuvSize = yLength + (uvLength * 2);
+ if (data == null || data.capacity() < minimumYuvSize) {
+ data = ByteBuffer.allocateDirect(minimumYuvSize);
+ }
+ data.limit(minimumYuvSize);
+ if (yuvPlanes == null) {
+ yuvPlanes = new ByteBuffer[3];
+ }
+ // Rewrapping has to be done on every frame since the stride might have changed.
+ data.position(0);
+ yuvPlanes[0] = data.slice();
+ yuvPlanes[0].limit(yLength);
+ data.position(yLength);
+ yuvPlanes[1] = data.slice();
+ yuvPlanes[1].limit(uvLength);
+ data.position(yLength + uvLength);
+ yuvPlanes[2] = data.slice();
+ yuvPlanes[2].limit(uvLength);
+ if (yuvStrides == null) {
+ yuvStrides = new int[3];
+ }
+ yuvStrides[0] = yStride;
+ yuvStrides[1] = uvStride;
+ yuvStrides[2] = uvStride;
+ }
+
+ }
+
+}
diff --git a/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxRenderer.java b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxRenderer.java
new file mode 100644
index 0000000000..c78619594f
--- /dev/null
+++ b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxRenderer.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer.ext.vp9;
+
+import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
+
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+/**
+ * GLSurfaceView.Renderer implementation that can render YUV Frames returned by libvpx after
+ * decoding. It does the YUV to RGB color conversion in the Fragment Shader.
+ */
+/* package */ class VpxRenderer implements GLSurfaceView.Renderer {
+
+ // Pass-through vertex shader: forwards the clip-space position and texture coordinate.
+ private static final String VERTEX_SHADER =
+ "varying vec2 interp_tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec2 in_tc;\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " interp_tc = in_tc;\n"
+ + "}\n";
+ private static final String[] TEXTURE_UNIFORMS = {"y_tex", "u_tex", "v_tex"};
+ // Fragment shader sampling the three planes and converting YUV to RGB. The
+ // coefficients (1.164, 1.596, 0.391, 0.813, 2.018) appear to be BT.601
+ // limited-range conversion constants -- confirm against the decoder's colorspace.
+ private static final String FRAGMENT_SHADER =
+ "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "uniform sampler2D y_tex;\n"
+ + "uniform sampler2D u_tex;\n"
+ + "uniform sampler2D v_tex;\n"
+ + "void main() {\n"
+ + " float y = 1.164 * (texture2D(y_tex, interp_tc).r - 0.0625);\n"
+ + " float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
+ + " float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
+ + " gl_FragColor = vec4(y + 1.596 * v, "
+ + " y - 0.391 * u - 0.813 * v, "
+ + " y + 2.018 * u, "
+ + " 1.0);\n"
+ + "}\n";
+ // Full-screen quad as a 4-vertex triangle strip in clip space.
+ private static final FloatBuffer TEXTURE_VERTICES = nativeFloatBuffer(
+ -1.0f, 1.0f,
+ -1.0f, -1.0f,
+ 1.0f, 1.0f,
+ 1.0f, -1.0f);
+ // One GL texture per plane: Y, U, V.
+ private final int[] yuvTextures = new int[3];
+
+ private int program;
+ private int texLocation;
+ private FloatBuffer textureCoords;
+ // volatile: written via setFrame() (potentially from another thread) and read
+ // on the GL thread in onDrawFrame().
+ private volatile OutputBuffer outputBuffer;
+ // -1 until the first frame is drawn; used to detect width/stride changes.
+ private int previousWidth;
+ private int previousStride;
+
+ public VpxRenderer() {
+ previousWidth = -1;
+ previousStride = -1;
+ }
+
+ /**
+ * Set a frame to be rendered. This should be followed by a call to
+ * VpxVideoSurfaceView.requestRender() to actually render the frame.
+ *
+ * @param outputBuffer OutputBuffer containing the YUV Frame to be rendered
+ */
+ public void setFrame(OutputBuffer outputBuffer) {
+ this.outputBuffer = outputBuffer;
+ }
+
+ @Override
+ public void onSurfaceCreated(GL10 unused, EGLConfig config) {
+ // Create the GL program.
+ program = GLES20.glCreateProgram();
+
+ // Add the vertex and fragment shaders.
+ addShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER, program);
+ addShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER, program);
+
+ // Link the GL program.
+ GLES20.glLinkProgram(program);
+ int[] result = new int[] {
+ GLES20.GL_FALSE
+ };
+ // NOTE(review): this assignment is redundant -- the array initializer above
+ // already sets result[0] to GL_FALSE.
+ result[0] = GLES20.GL_FALSE;
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, result, 0);
+ abortUnless(result[0] == GLES20.GL_TRUE, GLES20.glGetProgramInfoLog(program));
+ GLES20.glUseProgram(program);
+ // Bind the static full-screen quad to the position attribute.
+ int posLocation = GLES20.glGetAttribLocation(program, "in_pos");
+ GLES20.glEnableVertexAttribArray(posLocation);
+ GLES20.glVertexAttribPointer(
+ posLocation, 2, GLES20.GL_FLOAT, false, 0, TEXTURE_VERTICES);
+ // Texture coordinates are bound lazily in onDrawFrame, since they depend on
+ // the frame's width/stride crop factor.
+ texLocation = GLES20.glGetAttribLocation(program, "in_tc");
+ GLES20.glEnableVertexAttribArray(texLocation);
+ setupTextures();
+ checkNoGLES2Error();
+ }
+
+ @Override
+ public void onSurfaceChanged(GL10 unused, int width, int height) {
+ GLES20.glViewport(0, 0, width, height);
+ }
+
+ @Override
+ public void onDrawFrame(GL10 unused) {
+ // Take a local copy so a concurrent setFrame() cannot swap the buffer mid-draw.
+ OutputBuffer outputBuffer = this.outputBuffer;
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ if (outputBuffer == null) {
+ // Nothing to render yet.
+ return;
+ }
+ // Upload each plane; chroma planes are half-height.
+ for (int i = 0; i < 3; i++) {
+ int h = (i == 0) ? outputBuffer.height : (outputBuffer.height + 1) / 2;
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ // Plane rows are tightly packed at the stride, not 4-byte aligned.
+ GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, outputBuffer.yuvStrides[i],
+ h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, outputBuffer.yuvPlanes[i]);
+ }
+ // Set cropping of stride if either width or stride has changed.
+ if (previousWidth != outputBuffer.width || previousStride != outputBuffer.yuvStrides[0]) {
+ // The texture is stride texels wide but only width texels contain pixels;
+ // crop the horizontal texture coordinate to width/stride.
+ float crop = (float) outputBuffer.width / outputBuffer.yuvStrides[0];
+ textureCoords = nativeFloatBuffer(
+ 0.0f, 0.0f,
+ 0.0f, 1.0f,
+ crop, 0.0f,
+ crop, 1.0f);
+ GLES20.glVertexAttribPointer(
+ texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
+ previousWidth = outputBuffer.width;
+ previousStride = outputBuffer.yuvStrides[0];
+ }
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ checkNoGLES2Error();
+ }
+
+ // Compiles a shader of the given type and attaches it to the program, aborting
+ // on compile failure. The shader object is flagged for deletion once detached.
+ private void addShader(int type, String source, int program) {
+ int[] result = new int[] {
+ GLES20.GL_FALSE
+ };
+ int shader = GLES20.glCreateShader(type);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
+ abortUnless(result[0] == GLES20.GL_TRUE,
+ GLES20.glGetShaderInfoLog(shader) + ", source: " + source);
+ GLES20.glAttachShader(program, shader);
+ GLES20.glDeleteShader(shader);
+
+ checkNoGLES2Error();
+ }
+
+ // Generates the three plane textures, binds each sampler uniform to its texture
+ // unit, and configures linear filtering with clamp-to-edge wrapping.
+ private void setupTextures() {
+ GLES20.glGenTextures(3, yuvTextures, 0);
+ for (int i = 0; i < 3; i++) {
+ GLES20.glUniform1i(GLES20.glGetUniformLocation(program, TEXTURE_UNIFORMS[i]), i);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ }
+ checkNoGLES2Error();
+ }
+
+ // Throws a RuntimeException with the given message if the condition is false.
+ private void abortUnless(boolean condition, String msg) {
+ if (!condition) {
+ throw new RuntimeException(msg);
+ }
+ }
+
+ // Throws if the GL error flag is set; call after each batch of GL operations.
+ private void checkNoGLES2Error() {
+ int error = GLES20.glGetError();
+ if (error != GLES20.GL_NO_ERROR) {
+ throw new RuntimeException("GLES20 error: " + error);
+ }
+ }
+
+ // Allocates a direct, native-byte-order FloatBuffer containing the given values,
+ // as required by glVertexAttribPointer.
+ private static FloatBuffer nativeFloatBuffer(float... array) {
+ FloatBuffer buffer = ByteBuffer.allocateDirect(array.length * 4).order(
+ ByteOrder.nativeOrder()).asFloatBuffer();
+ buffer.put(array);
+ buffer.flip();
+ return buffer;
+ }
+
+}
diff --git a/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxVideoSurfaceView.java b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxVideoSurfaceView.java
new file mode 100644
index 0000000000..1a8b33d806
--- /dev/null
+++ b/extensions/vp9/src/main/java/com/google/android/exoplayer/ext/vp9/VpxVideoSurfaceView.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.android.exoplayer.ext.vp9;
+
+import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.opengl.GLSurfaceView;
+import android.util.AttributeSet;
+
+/**
+ * A GLSurfaceView extension that scales itself to the given aspect ratio.
+ */
+@TargetApi(11)
+public class VpxVideoSurfaceView extends GLSurfaceView {
+
+ // Maximum allowed aspect-ratio deformation, expressed as a fraction (0.01 == 1%),
+ // before onMeasure shrinks a dimension to match the video aspect ratio.
+ private static final float MAX_ASPECT_RATIO_DEFORMATION_PERCENT = 0.01f;
+
+ private final VpxRenderer renderer;
+
+ // 0 until setVideoWidthHeightRatio is called; onMeasure is a no-op until then.
+ private float videoAspectRatio;
+
+ public VpxVideoSurfaceView(Context context) {
+ this(context, null);
+ }
+
+ public VpxVideoSurfaceView(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ renderer = new VpxRenderer();
+ setPreserveEGLContextOnPause(true);
+ setEGLContextClientVersion(2);
+ setRenderer(renderer);
+ // Only redraw when renderFrame() explicitly requests it.
+ setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+ }
+
+ /**
+ * Sets the next frame on the renderer and schedules a render pass.
+ *
+ * @param outputBuffer The decoded output buffer to display.
+ */
+ public void renderFrame(OutputBuffer outputBuffer) {
+ renderer.setFrame(outputBuffer);
+ requestRender();
+ }
+
+ /**
+ * Set the aspect ratio that this view should satisfy.
+ *
+ * @param widthHeightRatio The width to height ratio.
+ */
+ public void setVideoWidthHeightRatio(float widthHeightRatio) {
+ if (this.videoAspectRatio != widthHeightRatio) {
+ this.videoAspectRatio = widthHeightRatio;
+ requestLayout();
+ }
+ }
+
+ @Override
+ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+ int width = getMeasuredWidth();
+ int height = getMeasuredHeight();
+ if (videoAspectRatio != 0) {
+ float viewAspectRatio = (float) width / height;
+ // Positive deformation: view is too tall; negative: too wide. Within the
+ // tolerance, keep the measured size to avoid churn from rounding.
+ float aspectDeformation = videoAspectRatio / viewAspectRatio - 1;
+ if (aspectDeformation > MAX_ASPECT_RATIO_DEFORMATION_PERCENT) {
+ height = (int) (width / videoAspectRatio);
+ } else if (aspectDeformation < -MAX_ASPECT_RATIO_DEFORMATION_PERCENT) {
+ width = (int) (height * videoAspectRatio);
+ }
+ }
+ setMeasuredDimension(width, height);
+ }
+
+}
diff --git a/extensions/vp9/src/main/jni/Android.mk b/extensions/vp9/src/main/jni/Android.mk
new file mode 100644
index 0000000000..fa566dbb50
--- /dev/null
+++ b/extensions/vp9/src/main/jni/Android.mk
@@ -0,0 +1,43 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+WORKING_DIR := $(call my-dir)
+include $(CLEAR_VARS)
+# NOTE(review): APP_PLATFORM is an Application.mk variable; setting it here in
+# Android.mk is likely ignored by ndk-build -- confirm and move it if so.
+APP_PLATFORM := android-10
+LIBVPX_ROOT := $(WORKING_DIR)/libvpx
+LIBYUV_ROOT := $(WORKING_DIR)/libyuv
+
+# build libyuv_static.a
+LOCAL_PATH := $(WORKING_DIR)
+include $(LIBYUV_ROOT)/Android.mk
+
+# build libvpx.so
+LOCAL_PATH := $(WORKING_DIR)
+include libvpx.mk
+
+# build libvpxJNI.so (the JNI glue loaded by the Java layer)
+include $(CLEAR_VARS)
+LOCAL_PATH := $(WORKING_DIR)
+LOCAL_MODULE := libvpxJNI
+LOCAL_ARM_MODE := arm
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := vpx_jni.cc
+LOCAL_LDLIBS := -llog -lz -lm
+LOCAL_SHARED_LIBRARIES := libvpx
+LOCAL_STATIC_LIBRARIES := libyuv_static cpufeatures
+include $(BUILD_SHARED_LIBRARY)
+
+# Pulls in the NDK cpufeatures module (android_getCpuCount used by vpx_jni.cc).
+$(call import-module,android/cpufeatures)
diff --git a/extensions/vp9/src/main/jni/Application.mk b/extensions/vp9/src/main/jni/Application.mk
new file mode 100644
index 0000000000..6563af0f50
--- /dev/null
+++ b/extensions/vp9/src/main/jni/Application.mk
@@ -0,0 +1,19 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+APP_OPTIM := release
+APP_STL := gnustl_static
+APP_CPPFLAGS := -frtti
diff --git a/extensions/vp9/src/main/jni/generate_libvpx_android_configs.sh b/extensions/vp9/src/main/jni/generate_libvpx_android_configs.sh
new file mode 100755
index 0000000000..d86978bd18
--- /dev/null
+++ b/extensions/vp9/src/main/jni/generate_libvpx_android_configs.sh
@@ -0,0 +1,106 @@
+#!/bin/bash
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# a bash script that generates the necessary config files for libvpx android ndk
+# builds.
+
+set -e
+
+if [ $# -ne 1 ]; then
+ echo "Usage: ${0} <path to android ndk>"
+ exit
+fi
+
+ndk="${1}"
+shift 1
+
+# configuration parameters common to all architectures
+common_params="--disable-examples --disable-docs --enable-realtime-only"
+common_params+=" --disable-vp8 --disable-vp9-encoder --disable-webm-io"
+common_params+=" --disable-libyuv --disable-runtime-cpu-detect"
+
+# configuration parameters for various architectures
+arch[0]="armeabi-v7a"
+config[0]="--target=armv7-android-gcc --sdk-path=$ndk --enable-neon"
+config[0]+=" --enable-neon-asm"
+
+arch[1]="armeabi"
+config[1]="--target=armv7-android-gcc --sdk-path=$ndk --disable-neon"
+config[1]+=" --disable-neon-asm"
+
+arch[2]="mips"
+config[2]="--force-target=mips32-android-gcc --sdk-path=$ndk"
+
+arch[3]="x86"
+config[3]="--force-target=x86-android-gcc --sdk-path=$ndk --disable-sse3"
+config[3]+=" --disable-ssse3 --disable-sse4_1 --disable-avx --disable-avx2"
+config[3]+=" --enable-pic"
+
+limit=$((${#arch[@]} - 1))
+
+# list of files allowed after running configure in each arch directory.
+# everything else will be removed.
+allowed_files="libvpx_srcs.txt vpx_config.c vpx_config.h vpx_scale_rtcd.h"
+allowed_files+=" vp8_rtcd.h vp9_rtcd.h vpx_version.h vpx_config.asm"
+
+# Strips trailing whitespace in-place from every file passed as an argument
+# (GNU sed -i syntax).
+remove_trailing_whitespace() {
+ sed -i 's/\s\+$//' "$@"
+}
+
+# For each *.asm.s entry in every arch's libvpx_srcs.txt, generates the GNU-as
+# file from the corresponding ARM ADS *.asm source via libvpx's ads2gas.pl,
+# strips trailing whitespace, and removes the original *.asm file.
+convert_asm() {
+ for i in $(seq 0 ${limit}); do
+ while read file; do
+ case "${file}" in
+ *.asm.s)
+ asm_file="libvpx/${file%.s}"
+ cat "${asm_file}" | libvpx/build/make/ads2gas.pl > "libvpx/${file}"
+ remove_trailing_whitespace "libvpx/${file}"
+ rm "${asm_file}"
+ ;;
+ esac
+ done < libvpx_android_configs/${arch[${i}]}/libvpx_srcs.txt
+ done
+}
+
+extglob_status="$(shopt extglob | cut -f2)"
+shopt -s extglob
+for i in $(seq 0 ${limit}); do
+ mkdir -p "libvpx_android_configs/${arch[${i}]}"
+ cd "libvpx_android_configs/${arch[${i}]}"
+
+ # configure and make
+ echo "build_android_configs: "
+ echo "configure ${config[${i}]} ${common_params}"
+ ../../libvpx/configure ${config[${i}]} ${common_params}
+ rm -f libvpx_srcs.txt
+ make libvpx_srcs.txt
+
+ # remove files that aren't needed
+ rm -rf !(${allowed_files// /|})
+ remove_trailing_whitespace *
+
+ cd ../..
+done
+
+# restore extglob status as it was before
+if [[ "${extglob_status}" == "off" ]]; then
+ shopt -u extglob
+fi
+
+convert_asm
+
+echo "Generated android config files."
diff --git a/extensions/vp9/src/main/jni/libvpx.mk b/extensions/vp9/src/main/jni/libvpx.mk
new file mode 100644
index 0000000000..369b3b7f94
--- /dev/null
+++ b/extensions/vp9/src/main/jni/libvpx.mk
@@ -0,0 +1,51 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+CONFIG_DIR := $(LOCAL_PATH)/libvpx_android_configs/$(TARGET_ARCH_ABI)
+libvpx_source_dir := $(LOCAL_PATH)/libvpx
+
+LOCAL_MODULE := libvpx
+LOCAL_MODULE_CLASS := SHARED_LIBRARIES
+LOCAL_CFLAGS := -DHAVE_CONFIG_H=vpx_config.h
+LOCAL_ARM_MODE := arm
+LOCAL_CFLAGS += -O3
+
+# config specific include should go first to pick up the config specific rtcd.
+LOCAL_C_INCLUDES := $(CONFIG_DIR) $(libvpx_source_dir)
+
+# generate source file list
+libvpx_codec_srcs := $(sort $(shell cat $(CONFIG_DIR)/libvpx_srcs.txt))
+LOCAL_SRC_FILES := libvpx_android_configs/$(TARGET_ARCH_ABI)/vpx_config.c
+LOCAL_SRC_FILES += $(addprefix libvpx/, $(filter-out vpx_config.c, \
+ $(filter %.c, $(libvpx_codec_srcs))))
+
+# include assembly files if they exist
+# "%.asm.s" covers neon assembly and "%.asm" covers x86 assembly
+LOCAL_SRC_FILES += $(addprefix libvpx/, \
+ $(filter %.asm.s %.asm, $(libvpx_codec_srcs)))
+
+ifneq ($(findstring armeabi-v7a, $(TARGET_ARCH_ABI)),)
+# append .neon to *_neon.c and *.s
+LOCAL_SRC_FILES := $(subst _neon.c,_neon.c.neon,$(LOCAL_SRC_FILES))
+LOCAL_SRC_FILES := $(subst .s,.s.neon,$(LOCAL_SRC_FILES))
+endif
+
+LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/libvpx \
+ $(LOCAL_PATH)/libvpx/vpx
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/extensions/vp9/src/main/jni/vpx_jni.cc b/extensions/vp9/src/main/jni/vpx_jni.cc
new file mode 100644
index 0000000000..21a845d779
--- /dev/null
+++ b/extensions/vp9/src/main/jni/vpx_jni.cc
@@ -0,0 +1,149 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/log.h>
+#include <jni.h>
+
+#include <cpu-features.h>
+
+#include <cstdint>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+
+#include "libyuv.h" // NOLINT
+
+#define VPX_CODEC_DISABLE_COMPAT 1
+#include "vpx/vpx_decoder.h"
+#include "vpx/vp8dx.h"
+
+#define LOG_TAG "LIBVPX_DEC"
+#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, \
+ __VA_ARGS__))
+
+#define FUNC(RETURN_TYPE, NAME, ...) \
+ extern "C" { \
+ JNIEXPORT RETURN_TYPE \
+ Java_com_google_android_exoplayer_ext_vp9_VpxDecoder_ ## NAME \
+ (JNIEnv* env, jobject thiz, ##__VA_ARGS__);\
+ } \
+ JNIEXPORT RETURN_TYPE \
+ Java_com_google_android_exoplayer_ext_vp9_VpxDecoder_ ## NAME \
+ (JNIEnv* env, jobject thiz, ##__VA_ARGS__)\
+
+// JNI references for OutputBuffer class.
+static jmethodID initForRgbFrame;
+static jmethodID initForYuvFrame;
+static jfieldID dataField;
+
+// Standard JNI entry point: declares the JNI version this library requires.
+jint JNI_OnLoad(JavaVM* vm, void* reserved) {
+ JNIEnv* env;
+ // GetEnv takes a void** out-parameter; the template argument of the cast was
+ // missing in the original (stripped angle brackets) and is restored here.
+ if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
+ return -1;
+ }
+ return JNI_VERSION_1_6;
+}
+
+// Creates and initializes a VP9 decoder context, caches the JNI method/field
+// IDs of OutputBuffer, and returns the context as an opaque jlong handle
+// (0 on failure).
+FUNC(jlong, vpxInit) {
+ vpx_codec_ctx_t* context = new vpx_codec_ctx_t();
+ vpx_codec_dec_cfg_t cfg = {0};
+ cfg.threads = android_getCpuCount();
+ if (vpx_codec_dec_init(context, &vpx_codec_vp9_dx_algo, &cfg, 0)) {
+ LOGE("ERROR: Fail to initialize libvpx decoder.");
+ delete context; // Fix: avoid leaking the context when initialization fails.
+ return 0;
+ }
+
+ // Populate JNI References.
+ const jclass outputBufferClass = env->FindClass(
+ "com/google/android/exoplayer/ext/vp9/VpxDecoderWrapper$OutputBuffer");
+ initForYuvFrame = env->GetMethodID(outputBufferClass, "initForYuvFrame",
+ "(IIII)V");
+ initForRgbFrame = env->GetMethodID(outputBufferClass, "initForRgbFrame",
+ "(II)V");
+ dataField = env->GetFieldID(outputBufferClass, "data",
+ "Ljava/nio/ByteBuffer;");
+
+ // Template argument of the cast restored (stripped in the original text).
+ return reinterpret_cast<jlong>(context);
+}
+
+FUNC(jlong, vpxDecode, jlong jContext, jobject encoded, jint len) {
+ vpx_codec_ctx_t* const context = reinterpret_cast(jContext);
+ const uint8_t* const buffer =
+ reinterpret_cast(env->GetDirectBufferAddress(encoded));
+ const vpx_codec_err_t status =
+ vpx_codec_decode(context, buffer, len, NULL, 0);
+ if (status != VPX_CODEC_OK) {
+ LOGE("ERROR: vpx_codec_decode() failed, status= %d", status);
+ return -1;
+ }
+ return 0;
+}
+
+// Destroys the decoder and frees the context allocated by vpxInit.
+// Template argument of the cast restored (stripped in the original text).
+FUNC(jlong, vpxClose, jlong jContext) {
+ vpx_codec_ctx_t* const context = reinterpret_cast<vpx_codec_ctx_t*>(jContext);
+ vpx_codec_destroy(context);
+ delete context;
+ return 0;
+}
+
+// Fetches the next decoded frame into |jOutputBuffer|, either converted to
+// RGB565 via libyuv or copied out plane-by-plane as YUV. Returns 0 when a
+// frame was written, 1 when no frame is available.
+// Template arguments of the three casts restored (stripped in the original text).
+FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer, jboolean isRGB) {
+ vpx_codec_ctx_t* const context = reinterpret_cast<vpx_codec_ctx_t*>(jContext);
+ vpx_codec_iter_t iter = NULL;
+ const vpx_image_t* const img = vpx_codec_get_frame(context, &iter);
+
+ if (img == NULL) {
+ return 1;
+ }
+
+ if (isRGB == JNI_TRUE) {
+ // resize buffer if required.
+ env->CallVoidMethod(jOutputBuffer, initForRgbFrame, img->d_w, img->d_h);
+
+ // get pointer to the data buffer.
+ const jobject dataObject = env->GetObjectField(jOutputBuffer, dataField);
+ uint8_t* const dst =
+ reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(dataObject));
+
+ // Convert to RGB565; destination stride is d_w * 2 bytes per pixel.
+ libyuv::I420ToRGB565(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
+ img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
+ img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
+ dst, img->d_w * 2, img->d_w, img->d_h);
+ } else {
+ // resize buffer if required.
+ env->CallVoidMethod(jOutputBuffer, initForYuvFrame, img->d_w, img->d_h,
+ img->stride[VPX_PLANE_Y], img->stride[VPX_PLANE_U]);
+
+ // get pointer to the data buffer.
+ const jobject dataObject = env->GetObjectField(jOutputBuffer, dataField);
+ jbyte* const data =
+ reinterpret_cast<jbyte*>(env->GetDirectBufferAddress(dataObject));
+
+ // TODO: This copy can be eliminated by using external frame buffers. NOLINT
+ // This is insignificant for smaller videos but takes ~1.5ms for 1080p
+ // clips. So this should eventually be gotten rid of.
+ const uint64_t y_length = img->stride[VPX_PLANE_Y] * img->d_h;
+ const uint64_t uv_length = img->stride[VPX_PLANE_U] * ((img->d_h + 1) / 2);
+ memcpy(data, img->planes[VPX_PLANE_Y], y_length);
+ memcpy(data + y_length, img->planes[VPX_PLANE_U], uv_length);
+ memcpy(data + y_length + uv_length, img->planes[VPX_PLANE_V], uv_length);
+ }
+ return 0;
+}
+
+FUNC(jstring, vpxGetErrorMessage, jlong jContext) {
+ vpx_codec_ctx_t* const context = reinterpret_cast(jContext);
+ return env->NewStringUTF(vpx_codec_error(context));
+}
diff --git a/extensions/vp9/src/main/project.properties b/extensions/vp9/src/main/project.properties
new file mode 100644
index 0000000000..2ed62fbfcf
--- /dev/null
+++ b/extensions/vp9/src/main/project.properties
@@ -0,0 +1,16 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-22
+android.library=true
+android.library.reference.1=../../../../library/src/main
diff --git a/extensions/vp9/src/main/res/.README.txt b/extensions/vp9/src/main/res/.README.txt
new file mode 100644
index 0000000000..c27147ce56
--- /dev/null
+++ b/extensions/vp9/src/main/res/.README.txt
@@ -0,0 +1,2 @@
+This file is needed to make sure the res directory is present.
+The file is ignored by the Android toolchain because its name starts with a dot.