Libvpx: Support directly outputting YUV to a SurfaceView. This lets dedicated hardware (rather than the GPU) convert YUV to RGB, and should use less power overall.

Power Comparison (two runs per configuration):

                                     1080p         720p
TextureView    H264 HW               498, 496      507, 478
               VP9 RGB               1050, 1104    1185, 1152
               VP9 ANativeWindow     1070, 985     700, 674
GLSurfaceView  VP9 YUV               1075, 1112    716, 635
SurfaceView    H264 HW               419, 409      397, 377
               VP9 RGB               1044, 1139    654, 671
               VP9 ANativeWindow     975, 835      617, 623
               VP9 MediaCodec        683, 679      488, 476

Each value is the average current drawn, in mA, on a Nexus 6 at full brightness, measured from t = 3 s to t = 95 s. The same clip was used for all tests, and two measurements were taken for each configuration.
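To make the new option concrete, here is a minimal usage sketch (not part of this change). It assumes an ExoPlayer instance `player`, a `SurfaceView surfaceView`, and placeholder `eventHandler`/`eventListener` objects, and shows the renderer being constructed with the new flag and pointed at the SurfaceView's surface:

LibvpxVideoRenderer renderer =
    new LibvpxVideoRenderer(
        /* scaleToFit= */ true,
        /* allowedJoiningTimeMs= */ 5000,
        eventHandler,      // assumed Handler on the main looper
        eventListener,     // assumed VideoRendererEventListener
        /* maxDroppedFramesToNotify= */ 50,
        /* drmSessionManager= */ null,
        /* playClearSamplesWithoutKeys= */ false,
        /* disableLoopFilter= */ false,
        /* useSurfaceYuvOutput= */ true);
// With useSurfaceYuvOutput set, attaching a surface selects
// OUTPUT_MODE_SURFACE_YUV rather than OUTPUT_MODE_RGB.
player.createMessage(renderer)
    .setType(C.MSG_SET_SURFACE)
    .setPayload(surfaceView.getHolder().getSurface())
    .send();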

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=203847744

commit e60de62588 (parent 17a58969af)
Author: anjalibh, 2018-07-09 16:04:09 -07:00; committed by Oliver Woodman
6 changed files with 345 additions and 35 deletions

extensions/vp9/src/main/java/com/google/android/exoplayer2/ext/vp9/LibvpxVideoRenderer.java

@@ -114,6 +114,7 @@ public class LibvpxVideoRenderer extends BaseRenderer {
private final FormatHolder formatHolder;
private final DecoderInputBuffer flagsOnlyBuffer;
private final DrmSessionManager<ExoMediaCrypto> drmSessionManager;
private final boolean useSurfaceYuvOutput;
private Format format;
private VpxDecoder decoder;
@@ -177,7 +178,8 @@ public class LibvpxVideoRenderer extends BaseRenderer {
maxDroppedFramesToNotify,
/* drmSessionManager= */ null,
/* playClearSamplesWithoutKeys= */ false,
/* disableLoopFilter= */ false);
/* disableLoopFilter= */ false,
/* useSurfaceYuvOutput= */ false);
}
/**
@@ -197,11 +199,18 @@ public class LibvpxVideoRenderer extends BaseRenderer {
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param disableLoopFilter Disable the libvpx in-loop smoothing filter.
* @param useSurfaceYuvOutput Directly output YUV to the Surface via ANativeWindow.
*/
public LibvpxVideoRenderer(boolean scaleToFit, long allowedJoiningTimeMs,
Handler eventHandler, VideoRendererEventListener eventListener,
int maxDroppedFramesToNotify, DrmSessionManager<ExoMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys, boolean disableLoopFilter) {
public LibvpxVideoRenderer(
boolean scaleToFit,
long allowedJoiningTimeMs,
Handler eventHandler,
VideoRendererEventListener eventListener,
int maxDroppedFramesToNotify,
DrmSessionManager<ExoMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys,
boolean disableLoopFilter,
boolean useSurfaceYuvOutput) {
super(C.TRACK_TYPE_VIDEO);
this.scaleToFit = scaleToFit;
this.disableLoopFilter = disableLoopFilter;
@@ -209,6 +218,7 @@ public class LibvpxVideoRenderer extends BaseRenderer {
this.maxDroppedFramesToNotify = maxDroppedFramesToNotify;
this.drmSessionManager = drmSessionManager;
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
this.useSurfaceYuvOutput = useSurfaceYuvOutput;
joiningDeadlineMs = C.TIME_UNSET;
clearReportedVideoSize();
formatHolder = new FormatHolder();
@@ -549,21 +559,25 @@ public class LibvpxVideoRenderer extends BaseRenderer {
*
* @param outputBuffer The buffer to render.
*/
protected void renderOutputBuffer(VpxOutputBuffer outputBuffer) {
protected void renderOutputBuffer(VpxOutputBuffer outputBuffer) throws VpxDecoderException {
int bufferMode = outputBuffer.mode;
boolean renderRgb = bufferMode == VpxDecoder.OUTPUT_MODE_RGB && surface != null;
boolean renderSurface = bufferMode == VpxDecoder.OUTPUT_MODE_SURFACE_YUV && surface != null;
boolean renderYuv = bufferMode == VpxDecoder.OUTPUT_MODE_YUV && outputBufferRenderer != null;
lastRenderTimeUs = SystemClock.elapsedRealtime() * 1000;
if (!renderRgb && !renderYuv) {
if (!renderRgb && !renderYuv && !renderSurface) {
dropOutputBuffer(outputBuffer);
} else {
maybeNotifyVideoSizeChanged(outputBuffer.width, outputBuffer.height);
if (renderRgb) {
renderRgbFrame(outputBuffer, scaleToFit);
outputBuffer.release();
} else /* renderYuv */ {
} else if (renderYuv) {
outputBufferRenderer.setOutputBuffer(outputBuffer);
// The renderer will release the buffer.
} else { // renderSurface
decoder.renderToSurface(outputBuffer, surface);
outputBuffer.release();
}
consecutiveDroppedFrameCount = 0;
decoderCounters.renderedOutputBufferCount++;
@@ -633,8 +647,13 @@ public class LibvpxVideoRenderer extends BaseRenderer {
// The output has changed.
this.surface = surface;
this.outputBufferRenderer = outputBufferRenderer;
outputMode = outputBufferRenderer != null ? VpxDecoder.OUTPUT_MODE_YUV
: surface != null ? VpxDecoder.OUTPUT_MODE_RGB : VpxDecoder.OUTPUT_MODE_NONE;
if (surface != null) {
outputMode =
useSurfaceYuvOutput ? VpxDecoder.OUTPUT_MODE_SURFACE_YUV : VpxDecoder.OUTPUT_MODE_RGB;
} else {
outputMode =
outputBufferRenderer != null ? VpxDecoder.OUTPUT_MODE_YUV : VpxDecoder.OUTPUT_MODE_NONE;
}
if (outputMode != VpxDecoder.OUTPUT_MODE_NONE) {
if (decoder != null) {
decoder.setOutputMode(outputMode);
@@ -690,7 +709,8 @@ public class LibvpxVideoRenderer extends BaseRenderer {
NUM_OUTPUT_BUFFERS,
INITIAL_INPUT_BUFFER_SIZE,
mediaCrypto,
disableLoopFilter);
disableLoopFilter,
useSurfaceYuvOutput);
decoder.setOutputMode(outputMode);
TraceUtil.endSection();
long decoderInitializedTimestamp = SystemClock.elapsedRealtime();
@@ -817,7 +837,7 @@ public class LibvpxVideoRenderer extends BaseRenderer {
* @throws ExoPlaybackException If an error occurs processing the output buffer.
*/
private boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs)
throws ExoPlaybackException {
throws ExoPlaybackException, VpxDecoderException {
if (initialPositionUs == C.TIME_UNSET) {
initialPositionUs = positionUs;
}

extensions/vp9/src/main/java/com/google/android/exoplayer2/ext/vp9/VpxDecoder.java

@@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer2.ext.vp9;
import android.view.Surface;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.decoder.CryptoInfo;
import com.google.android.exoplayer2.decoder.SimpleDecoder;
@@ -31,6 +32,7 @@ import java.nio.ByteBuffer;
public static final int OUTPUT_MODE_NONE = -1;
public static final int OUTPUT_MODE_YUV = 0;
public static final int OUTPUT_MODE_RGB = 1;
public static final int OUTPUT_MODE_SURFACE_YUV = 2;
private static final int NO_ERROR = 0;
private static final int DECODE_ERROR = 1;
@@ -50,10 +52,17 @@ import java.nio.ByteBuffer;
* @param exoMediaCrypto The {@link ExoMediaCrypto} object required for decoding encrypted
content. May be null and can be ignored if the decoder does not handle encrypted content.
* @param disableLoopFilter Disable the libvpx in-loop smoothing filter.
* @param enableSurfaceYuvOutputMode Whether OUTPUT_MODE_SURFACE_YUV is allowed.
* @throws VpxDecoderException Thrown if an exception occurs when initializing the decoder.
*/
public VpxDecoder(int numInputBuffers, int numOutputBuffers, int initialInputBufferSize,
ExoMediaCrypto exoMediaCrypto, boolean disableLoopFilter) throws VpxDecoderException {
public VpxDecoder(
int numInputBuffers,
int numOutputBuffers,
int initialInputBufferSize,
ExoMediaCrypto exoMediaCrypto,
boolean disableLoopFilter,
boolean enableSurfaceYuvOutputMode)
throws VpxDecoderException {
super(new VpxInputBuffer[numInputBuffers], new VpxOutputBuffer[numOutputBuffers]);
if (!VpxLibrary.isAvailable()) {
throw new VpxDecoderException("Failed to load decoder native libraries.");
@@ -62,7 +71,7 @@ import java.nio.ByteBuffer;
if (exoMediaCrypto != null && !VpxLibrary.vpxIsSecureDecodeSupported()) {
throw new VpxDecoderException("Vpx decoder does not support secure decode.");
}
vpxDecContext = vpxInit(disableLoopFilter);
vpxDecContext = vpxInit(disableLoopFilter, enableSurfaceYuvOutputMode);
if (vpxDecContext == 0) {
throw new VpxDecoderException("Failed to initialize decoder");
}
@@ -96,6 +105,11 @@ import java.nio.ByteBuffer;
@Override
protected void releaseOutputBuffer(VpxOutputBuffer buffer) {
// Decode-only frames do not acquire a reference on the internal decoder buffer and thus do not
// require a call to vpxReleaseFrame.
if (outputMode == OUTPUT_MODE_SURFACE_YUV && !buffer.isDecodeOnly()) {
vpxReleaseFrame(vpxDecContext, buffer);
}
super.releaseOutputBuffer(buffer);
}
@@ -145,13 +159,36 @@ import java.nio.ByteBuffer;
vpxClose(vpxDecContext);
}
private native long vpxInit(boolean disableLoopFilter);
/** Renders the outputBuffer to the surface. Used with OUTPUT_MODE_SURFACE_YUV only. */
public void renderToSurface(VpxOutputBuffer outputBuffer, Surface surface)
throws VpxDecoderException {
int getFrameResult = vpxRenderFrame(vpxDecContext, surface, outputBuffer);
if (getFrameResult == -1) {
throw new VpxDecoderException("Buffer render failed.");
}
}
private native long vpxInit(boolean disableLoopFilter, boolean enableSurfaceYuvOutputMode);
private native long vpxClose(long context);
private native long vpxDecode(long context, ByteBuffer encoded, int length);
private native long vpxSecureDecode(long context, ByteBuffer encoded, int length,
ExoMediaCrypto mediaCrypto, int inputMode, byte[] key, byte[] iv,
int numSubSamples, int[] numBytesOfClearData, int[] numBytesOfEncryptedData);
private native int vpxGetFrame(long context, VpxOutputBuffer outputBuffer);
/**
* Renders the frame to the surface. Used with OUTPUT_MODE_SURFACE_YUV only. Must only be called
* if {@link #vpxInit} was called with {@code enableBufferManager = true}.
*/
private native int vpxRenderFrame(long context, Surface surface, VpxOutputBuffer outputBuffer);
/**
* Releases the frame. Used with OUTPUT_MODE_SURFACE_YUV only. Must only be called if {@link
* #vpxInit} was called with {@code enableBufferManager = true}.
*/
private native int vpxReleaseFrame(long context, VpxOutputBuffer outputBuffer);
private native int vpxGetErrorCode(long context);
private native String vpxGetErrorMessage(long context);
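
For orientation, a sketch of the calling pattern this API implies, mirroring the LibvpxVideoRenderer change above (`decoder`, `outputBuffer`, and `surface` are assumed to be set up as in that file):

decoder.setOutputMode(VpxDecoder.OUTPUT_MODE_SURFACE_YUV);
// ... decode until an output buffer is available ...
decoder.renderToSurface(outputBuffer, surface); // native YUV blit via ANativeWindow
outputBuffer.release(); // routes to releaseOutputBuffer(), which calls vpxReleaseFrame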

extensions/vp9/src/main/java/com/google/android/exoplayer2/ext/vp9/VpxOutputBuffer.java

@@ -30,6 +30,8 @@ import java.nio.ByteBuffer;
public static final int COLORSPACE_BT2020 = 3;
private final VpxDecoder owner;
/** Decoder private data. */
public int decoderPrivate;
public int mode;
/**

extensions/vp9/src/main/jni/Android.mk

@@ -35,7 +35,7 @@ LOCAL_MODULE := libvpxJNI
LOCAL_ARM_MODE := arm
LOCAL_CPP_EXTENSION := .cc
LOCAL_SRC_FILES := vpx_jni.cc
LOCAL_LDLIBS := -llog -lz -lm
LOCAL_LDLIBS := -llog -lz -lm -landroid
LOCAL_SHARED_LIBRARIES := libvpx
LOCAL_STATIC_LIBRARIES := libyuv_static cpufeatures
include $(BUILD_SHARED_LIBRARY)

extensions/vp9/src/main/jni/vpx_jni.cc

@@ -21,7 +21,9 @@
#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <pthread.h>
#include <algorithm>
#include <cstdio>
#include <cstdlib>
@@ -63,6 +65,11 @@ static jmethodID initForRgbFrame;
static jmethodID initForYuvFrame;
static jfieldID dataField;
static jfieldID outputModeField;
static jfieldID decoderPrivateField;
// android.graphics.ImageFormat.YV12.
static const int kHalPixelFormatYV12 = 0x32315659;
static const int kDecoderPrivateBase = 0x100;
static int errorCode;
jint JNI_OnLoad(JavaVM* vm, void* reserved) {
@@ -282,13 +289,166 @@ static void convert_16_to_8_standard(const vpx_image_t* const img,
}
}
DECODER_FUNC(jlong, vpxInit, jboolean disableLoopFilter) {
vpx_codec_ctx_t* context = new vpx_codec_ctx_t();
struct JniFrameBuffer {
friend class JniBufferManager;
int stride[4];
uint8_t* planes[4];
int d_w;
int d_h;
private:
int id;
int ref_count;
vpx_codec_frame_buffer_t vpx_fb;
};
class JniBufferManager {
static const int MAX_FRAMES = 32;
JniFrameBuffer* all_buffers[MAX_FRAMES];
int all_buffer_count = 0;
JniFrameBuffer* free_buffers[MAX_FRAMES];
int free_buffer_count = 0;
pthread_mutex_t mutex;
public:
JniBufferManager() { pthread_mutex_init(&mutex, NULL); }
~JniBufferManager() {
while (all_buffer_count--) {
free(all_buffers[all_buffer_count]->vpx_fb.data);
delete all_buffers[all_buffer_count];  // also free the wrapper to avoid leaking it
}
}
int get_buffer(size_t min_size, vpx_codec_frame_buffer_t* fb) {
pthread_mutex_lock(&mutex);
JniFrameBuffer* out_buffer;
if (free_buffer_count) {
out_buffer = free_buffers[--free_buffer_count];
if (out_buffer->vpx_fb.size < min_size) {
free(out_buffer->vpx_fb.data);
out_buffer->vpx_fb.data = (uint8_t*)malloc(min_size);
out_buffer->vpx_fb.size = min_size;
}
} else {
out_buffer = new JniFrameBuffer();
out_buffer->id = all_buffer_count;
all_buffers[all_buffer_count++] = out_buffer;
out_buffer->vpx_fb.data = (uint8_t*)malloc(min_size);
out_buffer->vpx_fb.size = min_size;
out_buffer->vpx_fb.priv = &out_buffer->id;
}
*fb = out_buffer->vpx_fb;
int retVal = 0;
if (!out_buffer->vpx_fb.data || all_buffer_count >= MAX_FRAMES) {
LOGE("ERROR: JniBufferManager get_buffer OOM.");
retVal = -1;
} else {
memset(fb->data, 0, fb->size);
}
out_buffer->ref_count = 1;
pthread_mutex_unlock(&mutex);
return retVal;
}
JniFrameBuffer* get_buffer(int id) const {
if (id < 0 || id >= all_buffer_count) {
LOGE("ERROR: JniBufferManager get_buffer invalid id %d.", id);
return NULL;
}
return all_buffers[id];
}
void add_ref(int id) {
if (id < 0 || id >= all_buffer_count) {
LOGE("ERROR: JniBufferManager add_ref invalid id %d.", id);
return;
}
pthread_mutex_lock(&mutex);
all_buffers[id]->ref_count++;
pthread_mutex_unlock(&mutex);
}
int release(int id) {
if (id < 0 || id >= all_buffer_count) {
LOGE("ERROR: JniBufferManager release invalid id %d.", id);
return -1;
}
pthread_mutex_lock(&mutex);
JniFrameBuffer* buffer = all_buffers[id];
if (!buffer->ref_count) {
LOGE("ERROR: JniBufferManager release, buffer already released.");
pthread_mutex_unlock(&mutex);
return -1;
}
if (!--buffer->ref_count) {
free_buffers[free_buffer_count++] = buffer;
}
pthread_mutex_unlock(&mutex);
return 0;
}
};
struct JniCtx {
JniCtx(bool enableBufferManager) {
if (enableBufferManager) {
buffer_manager = new JniBufferManager();
}
}
~JniCtx() {
if (native_window) {
ANativeWindow_release(native_window);
}
if (buffer_manager) {
delete buffer_manager;
}
}
void acquire_native_window(JNIEnv* env, jobject new_surface) {
if (surface != new_surface) {
if (native_window) {
ANativeWindow_release(native_window);
}
native_window = ANativeWindow_fromSurface(env, new_surface);
surface = new_surface;
width = 0;
}
}
JniBufferManager* buffer_manager = NULL;
vpx_codec_ctx_t* decoder = NULL;
ANativeWindow* native_window = NULL;
jobject surface = NULL;
int width = 0;
int height = 0;
};
int vpx_get_frame_buffer(void* priv, size_t min_size,
vpx_codec_frame_buffer_t* fb) {
JniBufferManager* const buffer_manager =
reinterpret_cast<JniBufferManager*>(priv);
return buffer_manager->get_buffer(min_size, fb);
}
int vpx_release_frame_buffer(void* priv, vpx_codec_frame_buffer_t* fb) {
JniBufferManager* const buffer_manager =
reinterpret_cast<JniBufferManager*>(priv);
return buffer_manager->release(*(int*)fb->priv);
}
DECODER_FUNC(jlong, vpxInit, jboolean disableLoopFilter,
jboolean enableBufferManager) {
JniCtx* context = new JniCtx(enableBufferManager);
context->decoder = new vpx_codec_ctx_t();
vpx_codec_dec_cfg_t cfg = {0, 0, 0};
cfg.threads = android_getCpuCount();
errorCode = 0;
vpx_codec_err_t err = vpx_codec_dec_init(context, &vpx_codec_vp9_dx_algo,
&cfg, 0);
vpx_codec_err_t err =
vpx_codec_dec_init(context->decoder, &vpx_codec_vp9_dx_algo, &cfg, 0);
if (err) {
LOGE("ERROR: Failed to initialize libvpx decoder, error = %d.", err);
errorCode = err;
@@ -296,11 +456,20 @@ DECODER_FUNC(jlong, vpxInit, jboolean disableLoopFilter) {
}
if (disableLoopFilter) {
// TODO(b/71930387): Use vpx_codec_control(), not vpx_codec_control_().
err = vpx_codec_control_(context, VP9_SET_SKIP_LOOP_FILTER, true);
err = vpx_codec_control_(context->decoder, VP9_SET_SKIP_LOOP_FILTER, true);
if (err) {
LOGE("ERROR: Failed to shut off libvpx loop filter, error = %d.", err);
}
}
if (enableBufferManager) {
err = vpx_codec_set_frame_buffer_functions(
context->decoder, vpx_get_frame_buffer, vpx_release_frame_buffer,
context->buffer_manager);
if (err) {
LOGE("ERROR: Failed to set libvpx frame buffer functions, error = %d.",
err);
}
}
// Populate JNI References.
const jclass outputBufferClass = env->FindClass(
@@ -312,16 +481,17 @@ DECODER_FUNC(jlong, vpxInit, jboolean disableLoopFilter) {
dataField = env->GetFieldID(outputBufferClass, "data",
"Ljava/nio/ByteBuffer;");
outputModeField = env->GetFieldID(outputBufferClass, "mode", "I");
decoderPrivateField =
env->GetFieldID(outputBufferClass, "decoderPrivate", "I");
return reinterpret_cast<intptr_t>(context);
}
DECODER_FUNC(jlong, vpxDecode, jlong jContext, jobject encoded, jint len) {
vpx_codec_ctx_t* const context = reinterpret_cast<vpx_codec_ctx_t*>(jContext);
JniCtx* const context = reinterpret_cast<JniCtx*>(jContext);
const uint8_t* const buffer =
reinterpret_cast<const uint8_t*>(env->GetDirectBufferAddress(encoded));
const vpx_codec_err_t status =
vpx_codec_decode(context, buffer, len, NULL, 0);
vpx_codec_decode(context->decoder, buffer, len, NULL, 0);
errorCode = 0;
if (status != VPX_CODEC_OK) {
LOGE("ERROR: vpx_codec_decode() failed, status= %d", status);
@@ -343,16 +513,16 @@ DECODER_FUNC(jlong, vpxSecureDecode, jlong jContext, jobject encoded, jint len,
}
DECODER_FUNC(jlong, vpxClose, jlong jContext) {
vpx_codec_ctx_t* const context = reinterpret_cast<vpx_codec_ctx_t*>(jContext);
vpx_codec_destroy(context);
JniCtx* const context = reinterpret_cast<JniCtx*>(jContext);
vpx_codec_destroy(context->decoder);
delete context;
return 0;
}
DECODER_FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer) {
vpx_codec_ctx_t* const context = reinterpret_cast<vpx_codec_ctx_t*>(jContext);
JniCtx* const context = reinterpret_cast<JniCtx*>(jContext);
vpx_codec_iter_t iter = NULL;
const vpx_image_t* const img = vpx_codec_get_frame(context, &iter);
const vpx_image_t* const img = vpx_codec_get_frame(context->decoder, &iter);
if (img == NULL) {
return 1;
@@ -360,6 +530,7 @@ DECODER_FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer) {
const int kOutputModeYuv = 0;
const int kOutputModeRgb = 1;
const int kOutputModeSurfaceYuv = 2;
int outputMode = env->GetIntField(jOutputBuffer, outputModeField);
if (outputMode == kOutputModeRgb) {
@@ -435,13 +606,93 @@ DECODER_FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer) {
memcpy(data + yLength, img->planes[VPX_PLANE_U], uvLength);
memcpy(data + yLength + uvLength, img->planes[VPX_PLANE_V], uvLength);
}
} else if (outputMode == kOutputModeSurfaceYuv &&
img->fmt != VPX_IMG_FMT_I42016) {
if (!context->buffer_manager) {
return -1; // enableBufferManager was not set in vpxInit.
}
int id = *(int*)img->fb_priv;
context->buffer_manager->add_ref(id);
JniFrameBuffer* jfb = context->buffer_manager->get_buffer(id);
for (int i = 2; i >= 0; i--) {
jfb->stride[i] = img->stride[i];
jfb->planes[i] = (uint8_t*)img->planes[i];
}
jfb->d_w = img->d_w;
jfb->d_h = img->d_h;
env->SetIntField(jOutputBuffer, decoderPrivateField,
id + kDecoderPrivateBase);
}
return 0;
}
DECODER_FUNC(jint, vpxRenderFrame, jlong jContext, jobject jSurface,
jobject jOutputBuffer) {
JniCtx* const context = reinterpret_cast<JniCtx*>(jContext);
const int id = env->GetIntField(jOutputBuffer, decoderPrivateField) -
kDecoderPrivateBase;
JniFrameBuffer* srcBuffer = context->buffer_manager->get_buffer(id);
context->acquire_native_window(env, jSurface);
if (context->native_window == NULL || !srcBuffer) {
return 1;
}
if (context->width != srcBuffer->d_w || context->height != srcBuffer->d_h) {
ANativeWindow_setBuffersGeometry(context->native_window, srcBuffer->d_w,
srcBuffer->d_h, kHalPixelFormatYV12);
context->width = srcBuffer->d_w;
context->height = srcBuffer->d_h;
}
ANativeWindow_Buffer buffer;
int result = ANativeWindow_lock(context->native_window, &buffer, NULL);
if (result || buffer.bits == NULL) {  // check result first; bits is undefined on failure
return -1;
}
// Y
const size_t src_y_stride = srcBuffer->stride[VPX_PLANE_Y];
int stride = srcBuffer->d_w;
const uint8_t* src_base =
reinterpret_cast<uint8_t*>(srcBuffer->planes[VPX_PLANE_Y]);
uint8_t* dest_base = (uint8_t*)buffer.bits;
for (int y = 0; y < srcBuffer->d_h; y++) {
memcpy(dest_base, src_base, stride);
src_base += src_y_stride;
dest_base += buffer.stride;
}
// UV
const int src_uv_stride = srcBuffer->stride[VPX_PLANE_U];
const int dest_uv_stride = (buffer.stride / 2 + 15) & (~15);
const int32_t buffer_uv_height = (buffer.height + 1) / 2;
const int32_t height =
std::min((int32_t)(srcBuffer->d_h + 1) / 2, buffer_uv_height);
stride = (srcBuffer->d_w + 1) / 2;
src_base = reinterpret_cast<uint8_t*>(srcBuffer->planes[VPX_PLANE_U]);
const uint8_t* src_v_base =
reinterpret_cast<uint8_t*>(srcBuffer->planes[VPX_PLANE_V]);
uint8_t* dest_v_base =
((uint8_t*)buffer.bits) + buffer.stride * buffer.height;
dest_base = dest_v_base + buffer_uv_height * dest_uv_stride;
for (int y = 0; y < height; y++) {
memcpy(dest_base, src_base, stride);
memcpy(dest_v_base, src_v_base, stride);
src_base += src_uv_stride;
src_v_base += src_uv_stride;
dest_base += dest_uv_stride;
dest_v_base += dest_uv_stride;
}
return ANativeWindow_unlockAndPost(context->native_window);
}
DECODER_FUNC(jint, vpxReleaseFrame, jlong jContext, jobject jOutputBuffer) {
JniCtx* const context = reinterpret_cast<JniCtx*>(jContext);
const int id = env->GetIntField(jOutputBuffer, decoderPrivateField) -
kDecoderPrivateBase;
env->SetIntField(jOutputBuffer, decoderPrivateField, -1);
return context->buffer_manager->release(id);
}
DECODER_FUNC(jstring, vpxGetErrorMessage, jlong jContext) {
vpx_codec_ctx_t* const context = reinterpret_cast<vpx_codec_ctx_t*>(jContext);
return env->NewStringUTF(vpx_codec_error(context));
JniCtx* const context = reinterpret_cast<JniCtx*>(jContext);
return env->NewStringUTF(vpx_codec_error(context->decoder));
}
DECODER_FUNC(jint, vpxGetErrorCode, jlong jContext) { return errorCode; }
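
A worked sketch of the YV12 destination layout that vpxRenderFrame's copy loops assume, written in Java for consistency with the examples above (the 1920x1080 geometry and a destination stride equal to the width are assumptions for illustration):

int stride = 1920, height = 1080;            // assumed ANativeWindow buffer geometry
int uvStride = (stride / 2 + 15) & ~15;      // chroma rows 16-byte aligned -> 960
int uvHeight = (height + 1) / 2;             // 540
int vOffset = stride * height;               // YV12: V plane directly follows Y
int uOffset = vOffset + uvStride * uvHeight; // U plane follows V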

library/core/src/main/java/com/google/android/exoplayer2/decoder/SimpleDecoder.java

@@ -143,7 +143,7 @@ public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends Outp
releaseInputBufferInternal(queuedInputBuffers.removeFirst());
}
while (!queuedOutputBuffers.isEmpty()) {
releaseOutputBufferInternal(queuedOutputBuffers.removeFirst());
queuedOutputBuffers.removeFirst().release();
}
}
}
@@ -241,10 +241,10 @@ public abstract class SimpleDecoder<I extends DecoderInputBuffer, O extends Outp
synchronized (lock) {
if (flushed) {
releaseOutputBufferInternal(outputBuffer);
outputBuffer.release();
} else if (outputBuffer.isDecodeOnly()) {
skippedOutputBufferCount++;
releaseOutputBufferInternal(outputBuffer);
outputBuffer.release();
} else {
outputBuffer.skippedOutputBufferCount = skippedOutputBufferCount;
skippedOutputBufferCount = 0;
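
As far as the diff shows, the motivation for these SimpleDecoder changes is that releaseOutputBufferInternal() returned buffers straight to the pool, bypassing subclass overrides; buffer.release() instead routes through the owning decoder, so the VpxDecoder override shown earlier always runs and can free the native frame in OUTPUT_MODE_SURFACE_YUV:

@Override
protected void releaseOutputBuffer(VpxOutputBuffer buffer) {
  if (outputMode == OUTPUT_MODE_SURFACE_YUV && !buffer.isDecodeOnly()) {
    vpxReleaseFrame(vpxDecContext, buffer); // drop the native buffer-manager reference
  }
  super.releaseOutputBuffer(buffer); // return the buffer to the pool
}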