Allow custom VPX output buffer renderers.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=109832096
anjalibh 2015-12-09 14:23:36 -08:00 committed by Oliver Woodman
parent 7bc341f385
commit e487fe6fa7
9 changed files with 225 additions and 160 deletions
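
In practice, clients that previously passed a VpxVideoSurfaceView via MSG_SET_VPX_SURFACE_VIEW now pass any VpxOutputBufferRenderer via the renamed message, as the demo changes below show. A minimal sketch of the call; attachOutputBufferRenderer is a hypothetical helper, and the player and video renderer are assumed to be built as in the demo activity:

public void attachOutputBufferRenderer(ExoPlayer player, TrackRenderer videoRenderer,
    VpxOutputBufferRenderer bufferRenderer) {
  // The message payload may be any VpxOutputBufferRenderer implementation, for example the
  // VpxVideoSurfaceView from the demo (which now implements the interface), or null to clear it.
  player.sendMessage(videoRenderer, LibvpxVideoTrackRenderer.MSG_SET_OUTPUT_BUFFER_RENDERER,
      bufferRenderer);
}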

View File

@@ -173,7 +173,7 @@ public class VideoPlayer extends Activity implements OnClickListener,
TrackRenderer videoRenderer =
new LibvpxVideoTrackRenderer(sampleSource, true, handler, this, 50);
if (useOpenGL) {
player.sendMessage(videoRenderer, LibvpxVideoTrackRenderer.MSG_SET_VPX_SURFACE_VIEW,
player.sendMessage(videoRenderer, LibvpxVideoTrackRenderer.MSG_SET_OUTPUT_BUFFER_RENDERER,
vpxVideoSurfaceView);
surfaceView.setVisibility(View.GONE);
} else {
@@ -202,7 +202,7 @@ public class VideoPlayer extends Activity implements OnClickListener,
player.addListener(this);
mediaController.setMediaPlayer(new PlayerControl(player));
mediaController.setEnabled(true);
player.sendMessage(renderers[0], LibvpxVideoTrackRenderer.MSG_SET_VPX_SURFACE_VIEW,
player.sendMessage(renderers[0], LibvpxVideoTrackRenderer.MSG_SET_OUTPUT_BUFFER_RENDERER,
vpxVideoSurfaceView);
player.prepare(renderers);
player.setPlayWhenReady(true);

View File

@@ -23,8 +23,7 @@ import com.google.android.exoplayer.MediaFormatHolder;
import com.google.android.exoplayer.SampleSource;
import com.google.android.exoplayer.SampleSourceTrackRenderer;
import com.google.android.exoplayer.TrackRenderer;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.InputBuffer;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.VpxInputBuffer;
import com.google.android.exoplayer.util.MimeTypes;
import android.graphics.Bitmap;
@@ -88,7 +87,12 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
* should be the target {@link Surface}, or null.
*/
public static final int MSG_SET_SURFACE = 1;
public static final int MSG_SET_VPX_SURFACE_VIEW = 2;
/**
* The type of a message that can be passed to an instance of this class via
* {@link ExoPlayer#sendMessage} or {@link ExoPlayer#blockingSendMessage}. The message object
* should be the target {@link VpxOutputBufferRenderer}, or null.
*/
public static final int MSG_SET_OUTPUT_BUFFER_RENDERER = 2;
public final CodecCounters codecCounters = new CodecCounters();
@@ -100,14 +104,15 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
private MediaFormat format;
private VpxDecoderWrapper decoder;
private InputBuffer inputBuffer;
private OutputBuffer outputBuffer;
private VpxInputBuffer inputBuffer;
private VpxOutputBuffer outputBuffer;
private VpxOutputBuffer renderedOutputBuffer;
private Bitmap bitmap;
private boolean drawnToSurface;
private boolean renderedFirstFrame;
private Surface surface;
private VpxVideoSurfaceView vpxVideoSurfaceView;
private VpxOutputBufferRenderer outputBufferRenderer;
private int outputMode;
private boolean inputStreamEnded;
@@ -176,7 +181,13 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
return;
}
sourceIsReady = continueBufferingSource(positionUs);
checkForDiscontinuity(positionUs);
try {
checkForDiscontinuity(positionUs);
} catch (VpxDecoderException e) {
notifyDecoderError(e);
throw new ExoPlaybackException(e);
}
// Try and read a format if we don't have one already.
if (format == null && !readFormat(positionUs)) {
@@ -215,7 +226,8 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
if (outputBuffer.flags == VpxDecoderWrapper.FLAG_END_OF_STREAM) {
outputStreamEnded = true;
releaseOutputBuffer();
releaseOutputBuffer(outputBuffer);
outputBuffer = null;
return;
}
@@ -229,7 +241,8 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
if (droppedFrameCount == maxDroppedFrameCountToNotify) {
notifyAndResetDroppedFrameCount();
}
releaseOutputBuffer();
releaseOutputBuffer(outputBuffer);
outputBuffer = null;
return;
}
@@ -266,18 +279,23 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
drawnToSurface = true;
notifyDrawnToSurface(surface);
}
} else if (outputBuffer.mode == VpxDecoder.OUTPUT_MODE_YUV && vpxVideoSurfaceView != null) {
vpxVideoSurfaceView.renderFrame(outputBuffer);
} else if (outputBuffer.mode == VpxDecoder.OUTPUT_MODE_YUV && outputBufferRenderer != null) {
outputBufferRenderer.setOutputBuffer(outputBuffer);
}
releaseOutputBuffer();
}
private void releaseOutputBuffer() throws VpxDecoderException {
decoder.releaseOutputBuffer(outputBuffer);
// Release the output buffer we rendered during the previous cycle, now that we have
// delivered a new buffer.
releaseOutputBuffer(renderedOutputBuffer);
renderedOutputBuffer = outputBuffer;
outputBuffer = null;
}
private void renderRgbFrame(OutputBuffer outputBuffer, boolean scale) {
private void releaseOutputBuffer(VpxOutputBuffer buffer) throws VpxDecoderException {
if (buffer != null) {
decoder.releaseOutputBuffer(buffer);
}
}
private void renderRgbFrame(VpxOutputBuffer outputBuffer, boolean scale) {
if (bitmap == null || bitmap.getWidth() != outputBuffer.width
|| bitmap.getHeight() != outputBuffer.height) {
bitmap = Bitmap.createBitmap(outputBuffer.width, outputBuffer.height, Bitmap.Config.RGB_565);
@@ -332,7 +350,7 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
return true;
}
private void checkForDiscontinuity(long positionUs) {
private void checkForDiscontinuity(long positionUs) throws VpxDecoderException {
if (decoder == null) {
return;
}
@@ -342,9 +360,12 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
}
}
private void flushDecoder() {
private void flushDecoder() throws VpxDecoderException {
inputBuffer = null;
VpxOutputBuffer bufferToRelease = outputBuffer;
// Set this to null now because releaseOutputBuffer could throw an exception.
outputBuffer = null;
releaseOutputBuffer(bufferToRelease);
decoder.flush();
}
@@ -417,8 +438,8 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
if (messageType == MSG_SET_SURFACE) {
setSurface((Surface) message);
} else if (messageType == MSG_SET_VPX_SURFACE_VIEW) {
setVpxVideoSurfaceView((VpxVideoSurfaceView) message);
} else if (messageType == MSG_SET_OUTPUT_BUFFER_RENDERER) {
setOutputBufferRenderer((VpxOutputBufferRenderer) message);
} else {
super.handleMessage(messageType, message);
}
@@ -429,7 +450,7 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
return;
}
this.surface = surface;
vpxVideoSurfaceView = null;
outputBufferRenderer = null;
outputMode = (surface != null) ? VpxDecoder.OUTPUT_MODE_RGB : VpxDecoder.OUTPUT_MODE_UNKNOWN;
if (decoder != null) {
decoder.setOutputMode(outputMode);
@@ -437,20 +458,20 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
drawnToSurface = false;
}
private void setVpxVideoSurfaceView(VpxVideoSurfaceView vpxVideoSurfaceView) {
if (this.vpxVideoSurfaceView == vpxVideoSurfaceView) {
private void setOutputBufferRenderer(VpxOutputBufferRenderer outputBufferRenderer) {
if (this.outputBufferRenderer == outputBufferRenderer) {
return;
}
this.vpxVideoSurfaceView = vpxVideoSurfaceView;
this.outputBufferRenderer = outputBufferRenderer;
surface = null;
outputMode =
(vpxVideoSurfaceView != null) ? VpxDecoder.OUTPUT_MODE_YUV : VpxDecoder.OUTPUT_MODE_UNKNOWN;
outputMode = (outputBufferRenderer != null)
? VpxDecoder.OUTPUT_MODE_YUV : VpxDecoder.OUTPUT_MODE_UNKNOWN;
if (decoder != null) {
decoder.setOutputMode(outputMode);
}
}
private void notifyIfVideoSizeChanged(final OutputBuffer outputBuffer) {
private void notifyIfVideoSizeChanged(final VpxOutputBuffer outputBuffer) {
if (previousWidth == -1 || previousHeight == -1
|| previousWidth != outputBuffer.width || previousHeight != outputBuffer.height) {
previousWidth = outputBuffer.width;
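
Note the buffer hand-off introduced above: the buffer most recently delivered to the output buffer renderer is parked in renderedOutputBuffer and is only returned to the decoder when the next buffer replaces it, so an external renderer never reads from a recycled buffer. A simplified standalone model of that discipline; TwoSlotHandOff and its Pool interface are illustrative stand-ins, not the VpxDecoderWrapper API:

// Simplified model of the renderedOutputBuffer hand-off: release the previously delivered
// buffer only once the renderer has been handed its replacement.
final class TwoSlotHandOff<T> {
  interface Pool<P> {
    void release(P buffer);
  }

  private final Pool<T> pool;
  private T rendered; // The buffer the external renderer is currently drawing.

  TwoSlotHandOff(Pool<T> pool) {
    this.pool = pool;
  }

  void deliver(T next) {
    if (rendered != null) {
      pool.release(rendered); // Safe: the renderer has already switched to next.
    }
    rendered = next;
  }
}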

View File

@@ -15,8 +15,6 @@
*/
package com.google.android.exoplayer.ext.vp9;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
import java.nio.ByteBuffer;
/**
@@ -64,7 +62,7 @@ import java.nio.ByteBuffer;
* @return 0 on success with a frame to render. 1 on success without a frame to render.
* @throws VpxDecoderException on decode failure.
*/
public int decode(ByteBuffer encoded, int size, OutputBuffer outputBuffer)
public int decode(ByteBuffer encoded, int size, VpxOutputBuffer outputBuffer)
throws VpxDecoderException {
if (vpxDecode(vpxDecContext, encoded, size) != 0) {
throw new VpxDecoderException("libvpx decode error: " + vpxGetErrorMessage(vpxDecContext));
@@ -94,7 +92,7 @@ import java.nio.ByteBuffer;
private native long vpxInit();
private native long vpxClose(long context);
private native long vpxDecode(long context, ByteBuffer encoded, int length);
private native int vpxGetFrame(long context, OutputBuffer outputBuffer);
private native int vpxGetFrame(long context, VpxOutputBuffer outputBuffer);
private native String vpxGetErrorMessage(long context);
}

View File

@@ -28,16 +28,19 @@ import java.util.LinkedList;
public static final int FLAG_END_OF_STREAM = 1;
private static final int INPUT_BUFFER_SIZE = 768 * 1024; // Value based on cs/SoftVpx.cpp.
/**
* The total number of output buffers. {@link LibvpxVideoTrackRenderer} may hold on to two
* buffers at a time, so this value must be large enough to accommodate that.
*/
private static final int NUM_BUFFERS = 16;
private final Object lock;
private final LinkedList<InputBuffer> dequeuedInputBuffers;
private final LinkedList<InputBuffer> queuedInputBuffers;
private final LinkedList<OutputBuffer> queuedOutputBuffers;
private final LinkedList<OutputBuffer> dequeuedOutputBuffers;
private final InputBuffer[] availableInputBuffers;
private final OutputBuffer[] availableOutputBuffers;
private final LinkedList<VpxInputBuffer> dequeuedInputBuffers;
private final LinkedList<VpxInputBuffer> queuedInputBuffers;
private final LinkedList<VpxOutputBuffer> queuedOutputBuffers;
private final VpxInputBuffer[] availableInputBuffers;
private final VpxOutputBuffer[] availableOutputBuffers;
private int availableInputBufferCount;
private int availableOutputBufferCount;
@@ -57,14 +60,13 @@ import java.util.LinkedList;
dequeuedInputBuffers = new LinkedList<>();
queuedInputBuffers = new LinkedList<>();
queuedOutputBuffers = new LinkedList<>();
dequeuedOutputBuffers = new LinkedList<>();
availableInputBuffers = new InputBuffer[NUM_BUFFERS];
availableOutputBuffers = new OutputBuffer[NUM_BUFFERS];
availableInputBuffers = new VpxInputBuffer[NUM_BUFFERS];
availableOutputBuffers = new VpxOutputBuffer[NUM_BUFFERS];
availableInputBufferCount = NUM_BUFFERS;
availableOutputBufferCount = NUM_BUFFERS;
for (int i = 0; i < NUM_BUFFERS; i++) {
availableInputBuffers[i] = new InputBuffer();
availableOutputBuffers[i] = new OutputBuffer();
availableInputBuffers[i] = new VpxInputBuffer();
availableOutputBuffers[i] = new VpxOutputBuffer();
}
}
@@ -72,13 +74,13 @@ import java.util.LinkedList;
this.outputMode = outputMode;
}
public InputBuffer dequeueInputBuffer() throws VpxDecoderException {
public VpxInputBuffer dequeueInputBuffer() throws VpxDecoderException {
synchronized (lock) {
maybeThrowDecoderError();
if (availableInputBufferCount == 0) {
return null;
}
InputBuffer inputBuffer = availableInputBuffers[--availableInputBufferCount];
VpxInputBuffer inputBuffer = availableInputBuffers[--availableInputBufferCount];
inputBuffer.flags = 0;
inputBuffer.sampleHolder.clearData();
dequeuedInputBuffers.addLast(inputBuffer);
@@ -86,7 +88,7 @@ import java.util.LinkedList;
}
}
public void queueInputBuffer(InputBuffer inputBuffer) throws VpxDecoderException {
public void queueInputBuffer(VpxInputBuffer inputBuffer) throws VpxDecoderException {
synchronized (lock) {
maybeThrowDecoderError();
dequeuedInputBuffers.remove(inputBuffer);
@@ -95,22 +97,20 @@ import java.util.LinkedList;
}
}
public OutputBuffer dequeueOutputBuffer() throws VpxDecoderException {
public VpxOutputBuffer dequeueOutputBuffer() throws VpxDecoderException {
synchronized (lock) {
maybeThrowDecoderError();
if (queuedOutputBuffers.isEmpty()) {
return null;
}
OutputBuffer outputBuffer = queuedOutputBuffers.removeFirst();
dequeuedOutputBuffers.add(outputBuffer);
VpxOutputBuffer outputBuffer = queuedOutputBuffers.removeFirst();
return outputBuffer;
}
}
public void releaseOutputBuffer(OutputBuffer outputBuffer) throws VpxDecoderException {
public void releaseOutputBuffer(VpxOutputBuffer outputBuffer) throws VpxDecoderException {
synchronized (lock) {
maybeThrowDecoderError();
dequeuedOutputBuffers.remove(outputBuffer);
availableOutputBuffers[availableOutputBufferCount++] = outputBuffer;
maybeNotifyDecodeLoop();
}
@@ -128,9 +128,6 @@ import java.util.LinkedList;
while (!queuedOutputBuffers.isEmpty()) {
availableOutputBuffers[availableOutputBufferCount++] = queuedOutputBuffers.removeFirst();
}
while (!dequeuedOutputBuffers.isEmpty()) {
availableOutputBuffers[availableOutputBufferCount++] = dequeuedOutputBuffers.removeFirst();
}
}
}
@@ -187,8 +184,8 @@ import java.util.LinkedList;
private boolean decodeBuffer(VpxDecoder decoder) throws InterruptedException,
VpxDecoderException {
InputBuffer inputBuffer;
OutputBuffer outputBuffer;
VpxInputBuffer inputBuffer;
VpxOutputBuffer outputBuffer;
// Wait until we have an input buffer to decode, and an output buffer to decode into.
synchronized (lock) {
@@ -238,7 +235,7 @@ import java.util.LinkedList;
return true;
}
/* package */ static final class InputBuffer {
/* package */ static final class VpxInputBuffer {
public final SampleHolder sampleHolder;
@@ -246,75 +243,11 @@ import java.util.LinkedList;
public int height;
public int flags;
public InputBuffer() {
public VpxInputBuffer() {
sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DIRECT);
sampleHolder.data = ByteBuffer.allocateDirect(INPUT_BUFFER_SIZE);
}
}
/* package */ static final class OutputBuffer {
public ByteBuffer data;
public long timestampUs;
public int width;
public int height;
public int flags;
public ByteBuffer[] yuvPlanes;
public int[] yuvStrides;
public int mode;
/**
* This method is called from C++ through JNI after decoding is done. It will resize the
* buffer based on the given dimensions.
*/
public void initForRgbFrame(int width, int height) {
this.width = width;
this.height = height;
int minimumRgbSize = width * height * 2;
if (data == null || data.capacity() < minimumRgbSize) {
data = ByteBuffer.allocateDirect(minimumRgbSize);
yuvPlanes = null;
}
data.position(0);
data.limit(minimumRgbSize);
}
/**
* This method is called from C++ through JNI after decoding is done. It will resize the
* buffer based on the given stride.
*/
public void initForYuvFrame(int width, int height, int yStride, int uvStride) {
this.width = width;
this.height = height;
int yLength = yStride * height;
int uvLength = uvStride * ((height + 1) / 2);
int minimumYuvSize = yLength + (uvLength * 2);
if (data == null || data.capacity() < minimumYuvSize) {
data = ByteBuffer.allocateDirect(minimumYuvSize);
}
data.limit(minimumYuvSize);
if (yuvPlanes == null) {
yuvPlanes = new ByteBuffer[3];
}
// Rewrapping has to be done on every frame since the stride might have changed.
data.position(0);
yuvPlanes[0] = data.slice();
yuvPlanes[0].limit(yLength);
data.position(yLength);
yuvPlanes[1] = data.slice();
yuvPlanes[1].limit(uvLength);
data.position(yLength + uvLength);
yuvPlanes[2] = data.slice();
yuvPlanes[2].limit(uvLength);
if (yuvStrides == null) {
yuvStrides = new int[3];
}
yuvStrides[0] = yStride;
yuvStrides[1] = uvStride;
yuvStrides[2] = uvStride;
}
}
}
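
The wrapper keeps its free buffers in array-backed stacks and, after this change, no longer tracks dequeued output buffers, since LibvpxVideoTrackRenderer returns every buffer explicitly through releaseOutputBuffer. The stack discipline in isolation; BufferPool is a simplified stand-in, not the real class:

// Simplified stand-in for the array-backed free-buffer stack in VpxDecoderWrapper.
final class BufferPool<T> {
  private final T[] available;
  private int availableCount;

  @SuppressWarnings("unchecked")
  BufferPool(java.util.function.Supplier<T> factory, int numBuffers) {
    available = (T[]) new Object[numBuffers];
    for (int i = 0; i < numBuffers; i++) {
      available[i] = factory.get();
    }
    availableCount = numBuffers;
  }

  synchronized T dequeue() {
    // Mirrors dequeueOutputBuffer: null signals that the pool is exhausted.
    return availableCount == 0 ? null : available[--availableCount];
  }

  synchronized void release(T buffer) {
    // Mirrors releaseOutputBuffer: the buffer goes straight back onto the stack.
    available[availableCount++] = buffer;
  }
}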

View File

@@ -0,0 +1,85 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.ext.vp9;
import java.nio.ByteBuffer;
/**
* Output buffer for storing a decoded video frame.
*/
public final class VpxOutputBuffer {
public ByteBuffer data;
public long timestampUs;
public int width;
public int height;
public int flags;
public ByteBuffer[] yuvPlanes;
public int[] yuvStrides;
public int mode;
/**
* This method is called from C++ through JNI after decoding is done. It will resize the
* buffer based on the given dimensions.
*/
public void initForRgbFrame(int width, int height) {
this.width = width;
this.height = height;
int minimumRgbSize = width * height * 2;
if (data == null || data.capacity() < minimumRgbSize) {
data = ByteBuffer.allocateDirect(minimumRgbSize);
yuvPlanes = null;
}
data.position(0);
data.limit(minimumRgbSize);
}
/**
* This method is called from C++ through JNI after decoding is done. It will resize the
* buffer based on the given stride.
*/
public void initForYuvFrame(int width, int height, int yStride, int uvStride) {
this.width = width;
this.height = height;
int yLength = yStride * height;
int uvLength = uvStride * ((height + 1) / 2);
int minimumYuvSize = yLength + (uvLength * 2);
if (data == null || data.capacity() < minimumYuvSize) {
data = ByteBuffer.allocateDirect(minimumYuvSize);
}
data.limit(minimumYuvSize);
if (yuvPlanes == null) {
yuvPlanes = new ByteBuffer[3];
}
// Rewrapping has to be done on every frame since the stride might have changed.
data.position(0);
yuvPlanes[0] = data.slice();
yuvPlanes[0].limit(yLength);
data.position(yLength);
yuvPlanes[1] = data.slice();
yuvPlanes[1].limit(uvLength);
data.position(yLength + uvLength);
yuvPlanes[2] = data.slice();
yuvPlanes[2].limit(uvLength);
if (yuvStrides == null) {
yuvStrides = new int[3];
}
yuvStrides[0] = yStride;
yuvStrides[1] = uvStride;
yuvStrides[2] = uvStride;
}
}
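
The YUV layout packs the Y plane followed by the two chroma planes into a single direct buffer. A standalone check of the plane-size arithmetic used by initForYuvFrame; the frame dimensions and strides are example values:

// Standalone check of the plane-size arithmetic in initForYuvFrame.
public final class YuvSizeDemo {
  public static void main(String[] args) {
    int height = 360, yStride = 640, uvStride = 320; // Example values for a 640x360 frame.
    int yLength = yStride * height;                // 230400 bytes of Y.
    int uvLength = uvStride * ((height + 1) / 2);  // 57600 bytes per chroma plane.
    int minimumYuvSize = yLength + (uvLength * 2); // 345600 bytes in total.
    System.out.println("Y=" + yLength + " UV=" + uvLength + " total=" + minimumYuvSize);
  }
}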

View File

@@ -0,0 +1,28 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.ext.vp9;
/**
* Renders the {@link VpxOutputBuffer}.
*/
public interface VpxOutputBufferRenderer {
/**
* Sets the output buffer to be rendered.
*/
void setOutputBuffer(VpxOutputBuffer outputBuffer);
}
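
Any class can now implement this interface, not just VpxVideoSurfaceView. A hypothetical implementation that only inspects frame metadata, shown for illustration and not part of this commit:

import com.google.android.exoplayer.ext.vp9.VpxOutputBuffer;
import com.google.android.exoplayer.ext.vp9.VpxOutputBufferRenderer;

// Hypothetical VpxOutputBufferRenderer that logs the size and timestamp of each frame.
// Each delivered buffer stays valid until LibvpxVideoTrackRenderer delivers the next one.
public final class LoggingBufferRenderer implements VpxOutputBufferRenderer {

  private int frameCount;

  @Override
  public void setOutputBuffer(VpxOutputBuffer outputBuffer) {
    frameCount++;
    System.out.println("Frame " + frameCount + ": " + outputBuffer.width + "x"
        + outputBuffer.height + " @ " + outputBuffer.timestampUs + "us");
  }
}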

View File

@@ -15,8 +15,6 @@
*/
package com.google.android.exoplayer.ext.vp9;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
@@ -67,7 +65,7 @@ import javax.microedition.khronos.opengles.GL10;
private int program;
private int texLocation;
private FloatBuffer textureCoords;
private volatile OutputBuffer outputBuffer;
private VpxOutputBuffer outputBuffer;
private int previousWidth;
private int previousStride;
@@ -82,7 +80,7 @@ import javax.microedition.khronos.opengles.GL10;
*
* @param outputBuffer The {@link VpxOutputBuffer} containing the YUV frame to be rendered.
*/
public void setFrame(OutputBuffer outputBuffer) {
public synchronized void setFrame(VpxOutputBuffer outputBuffer) {
this.outputBuffer = outputBuffer;
}
@@ -121,33 +119,36 @@ import javax.microedition.khronos.opengles.GL10;
@Override
public void onDrawFrame(GL10 unused) {
OutputBuffer outputBuffer = this.outputBuffer;
synchronized (this) {
VpxOutputBuffer outputBuffer = this.outputBuffer;
if (outputBuffer == null) {
// Nothing to render yet.
return;
}
for (int i = 0; i < 3; i++) {
int h = (i == 0) ? outputBuffer.height : (outputBuffer.height + 1) / 2;
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
outputBuffer.yuvStrides[i], h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
outputBuffer.yuvPlanes[i]);
}
// Adjust the texture cropping if either the frame width or the stride has changed.
if (previousWidth != outputBuffer.width || previousStride != outputBuffer.yuvStrides[0]) {
float crop = (float) outputBuffer.width / outputBuffer.yuvStrides[0];
textureCoords = nativeFloatBuffer(
0.0f, 0.0f,
0.0f, 1.0f,
crop, 0.0f,
crop, 1.0f);
GLES20.glVertexAttribPointer(
texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
previousWidth = outputBuffer.width;
previousStride = outputBuffer.yuvStrides[0];
}
}
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if (outputBuffer == null) {
// Nothing to render yet.
return;
}
for (int i = 0; i < 3; i++) {
int h = (i == 0) ? outputBuffer.height : (outputBuffer.height + 1) / 2;
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, outputBuffer.yuvStrides[i],
h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, outputBuffer.yuvPlanes[i]);
}
// Adjust the texture cropping if either the frame width or the stride has changed.
if (previousWidth != outputBuffer.width || previousStride != outputBuffer.yuvStrides[0]) {
float crop = (float) outputBuffer.width / outputBuffer.yuvStrides[0];
textureCoords = nativeFloatBuffer(
0.0f, 0.0f,
0.0f, 1.0f,
crop, 0.0f,
crop, 1.0f);
GLES20.glVertexAttribPointer(
texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
previousWidth = outputBuffer.width;
previousStride = outputBuffer.yuvStrides[0];
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkNoGLES2Error();
}
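
The outputBuffer field is no longer volatile; instead, setFrame and the texture-upload half of onDrawFrame synchronize on the renderer, which keeps the GL thread from reading yuvPlanes while the decode thread swaps in the next buffer (now that buffers are recycled as soon as they are replaced). The same pattern in isolation; FrameHolder is a hypothetical stand-in:

// Hypothetical stand-in for the volatile-to-synchronized change in VpxRenderer: reads of
// the current frame happen entirely inside the lock that also guards the swap.
final class FrameHolder<T> {
  private T frame; // Guarded by this; no longer volatile.

  synchronized void setFrame(T frame) { // Called from the decode thread.
    this.frame = frame;
  }

  synchronized void readFrame(java.util.function.Consumer<T> reader) { // Called on the GL thread.
    if (frame != null) {
      reader.accept(frame); // The frame cannot be swapped out mid-read.
    }
  }
}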

View File

@@ -15,8 +15,6 @@
*/
package com.google.android.exoplayer.ext.vp9;
import com.google.android.exoplayer.ext.vp9.VpxDecoderWrapper.OutputBuffer;
import android.annotation.TargetApi;
import android.content.Context;
import android.opengl.GLSurfaceView;
@@ -26,7 +24,7 @@ import android.util.AttributeSet;
* A GLSurfaceView extension that scales itself to the given aspect ratio.
*/
@TargetApi(11)
public class VpxVideoSurfaceView extends GLSurfaceView {
public class VpxVideoSurfaceView extends GLSurfaceView implements VpxOutputBufferRenderer {
private final VpxRenderer renderer;
@@ -43,7 +41,8 @@ public class VpxVideoSurfaceView extends GLSurfaceView {
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
public void renderFrame(OutputBuffer outputBuffer) {
@Override
public void setOutputBuffer(VpxOutputBuffer outputBuffer) {
renderer.setFrame(outputBuffer);
requestRender();
}

View File

@@ -44,7 +44,7 @@
Java_com_google_android_exoplayer_ext_vp9_VpxDecoder_ ## NAME \
(JNIEnv* env, jobject thiz, ##__VA_ARGS__)\
// JNI references for OutputBuffer class.
// JNI references for VpxOutputBuffer class.
static jmethodID initForRgbFrame;
static jmethodID initForYuvFrame;
static jfieldID dataField;
@@ -69,7 +69,7 @@ FUNC(jlong, vpxInit) {
// Populate JNI References.
const jclass outputBufferClass = env->FindClass(
"com/google/android/exoplayer/ext/vp9/VpxDecoderWrapper$OutputBuffer");
"com/google/android/exoplayer/ext/vp9/VpxOutputBuffer");
initForYuvFrame = env->GetMethodID(outputBufferClass, "initForYuvFrame",
"(IIII)V");
initForRgbFrame = env->GetMethodID(outputBufferClass, "initForRgbFrame",