Make output mode a tri-state flag instead of binary

This CL fixes the case when a surface is attached *after* playback
has started.
This commit is contained in:
Oliver Woodman 2015-11-25 17:03:11 +00:00
parent 61fb0c558e
commit dc94e44b1f
4 changed files with 65 additions and 29 deletions

View File

@@ -108,7 +108,7 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
private boolean renderedFirstFrame; private boolean renderedFirstFrame;
private Surface surface; private Surface surface;
private VpxVideoSurfaceView vpxVideoSurfaceView; private VpxVideoSurfaceView vpxVideoSurfaceView;
private boolean outputRgb; private int outputMode;
private boolean inputStreamEnded; private boolean inputStreamEnded;
private boolean outputStreamEnded; private boolean outputStreamEnded;
@@ -148,6 +148,7 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
previousWidth = -1; previousWidth = -1;
previousHeight = -1; previousHeight = -1;
formatHolder = new MediaFormatHolder(); formatHolder = new MediaFormatHolder();
outputMode = VpxDecoder.OUTPUT_MODE_UNKNOWN;
} }
/** /**
@@ -184,9 +185,8 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
} }
// If we don't have a decoder yet, we need to instantiate one. // If we don't have a decoder yet, we need to instantiate one.
// TODO: Add support for dynamic switching between one type of surface to another.
if (decoder == null) { if (decoder == null) {
decoder = new VpxDecoderWrapper(outputRgb); decoder = new VpxDecoderWrapper(outputMode);
decoder.start(); decoder.start();
} }
@@ -260,15 +260,15 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
private void renderBuffer() throws VpxDecoderException { private void renderBuffer() throws VpxDecoderException {
codecCounters.renderedOutputBufferCount++; codecCounters.renderedOutputBufferCount++;
notifyIfVideoSizeChanged(outputBuffer); notifyIfVideoSizeChanged(outputBuffer);
if (outputRgb) { if (outputBuffer.mode == VpxDecoder.OUTPUT_MODE_RGB && surface != null) {
renderRgbFrame(outputBuffer, scaleToFit); renderRgbFrame(outputBuffer, scaleToFit);
} else { if (!drawnToSurface) {
drawnToSurface = true;
notifyDrawnToSurface(surface);
}
} else if (outputBuffer.mode == VpxDecoder.OUTPUT_MODE_YUV && vpxVideoSurfaceView != null) {
vpxVideoSurfaceView.renderFrame(outputBuffer); vpxVideoSurfaceView.renderFrame(outputBuffer);
} }
if (!drawnToSurface) {
drawnToSurface = true;
notifyDrawnToSurface(surface);
}
releaseOutputBuffer(); releaseOutputBuffer();
} }
@@ -416,18 +416,40 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
@Override @Override
public void handleMessage(int messageType, Object message) throws ExoPlaybackException { public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
if (messageType == MSG_SET_SURFACE) { if (messageType == MSG_SET_SURFACE) {
surface = (Surface) message; setSurface((Surface) message);
vpxVideoSurfaceView = null;
outputRgb = true;
} else if (messageType == MSG_SET_VPX_SURFACE_VIEW) { } else if (messageType == MSG_SET_VPX_SURFACE_VIEW) {
vpxVideoSurfaceView = (VpxVideoSurfaceView) message; setVpxVideoSurfaceView((VpxVideoSurfaceView) message);
surface = null;
outputRgb = false;
} else { } else {
super.handleMessage(messageType, message); super.handleMessage(messageType, message);
} }
} }
/**
 * Sets the {@link Surface} that will receive RGB output, switching the renderer into RGB mode.
 * <p>
 * Clears any previously set {@code VpxVideoSurfaceView}, since only one output target can be
 * active at a time. If a decoder already exists, its output mode is updated immediately so that
 * a surface attached after playback has started takes effect. Passing {@code null} detaches the
 * surface and resets the output mode to {@code OUTPUT_MODE_UNKNOWN}.
 */
private void setSurface(Surface surface) {
// No-op if the surface is unchanged.
if (this.surface == surface) {
return;
}
this.surface = surface;
// The surface view and the surface are mutually exclusive output targets.
vpxVideoSurfaceView = null;
outputMode = (surface != null) ? VpxDecoder.OUTPUT_MODE_RGB : VpxDecoder.OUTPUT_MODE_UNKNOWN;
if (decoder != null) {
// Propagate the new mode to the already-running decoder.
decoder.setOutputMode(outputMode);
}
// Reset so the next rendered frame triggers a fresh drawn-to-surface notification.
drawnToSurface = false;
}
/**
 * Sets the {@code VpxVideoSurfaceView} that will receive YUV output, switching the renderer
 * into YUV mode.
 * <p>
 * Clears any previously set {@link Surface}, since only one output target can be active at a
 * time. If a decoder already exists, its output mode is updated immediately. Passing
 * {@code null} detaches the view and resets the output mode to {@code OUTPUT_MODE_UNKNOWN}.
 */
private void setVpxVideoSurfaceView(VpxVideoSurfaceView vpxVideoSurfaceView) {
// No-op if the surface view is unchanged.
if (this.vpxVideoSurfaceView == vpxVideoSurfaceView) {
return;
}
this.vpxVideoSurfaceView = vpxVideoSurfaceView;
// The surface and the surface view are mutually exclusive output targets.
surface = null;
outputMode =
(vpxVideoSurfaceView != null) ? VpxDecoder.OUTPUT_MODE_YUV : VpxDecoder.OUTPUT_MODE_UNKNOWN;
if (decoder != null) {
// Propagate the new mode to the already-running decoder.
decoder.setOutputMode(outputMode);
}
}
private void notifyIfVideoSizeChanged(final OutputBuffer outputBuffer) { private void notifyIfVideoSizeChanged(final OutputBuffer outputBuffer) {
if (previousWidth == -1 || previousHeight == -1 if (previousWidth == -1 || previousHeight == -1
|| previousWidth != outputBuffer.width || previousHeight != outputBuffer.height) { || previousWidth != outputBuffer.width || previousHeight != outputBuffer.height) {

View File

@@ -37,6 +37,10 @@ import java.nio.ByteBuffer;
IS_AVAILABLE = isAvailable; IS_AVAILABLE = isAvailable;
} }
public static final int OUTPUT_MODE_UNKNOWN = -1;
public static final int OUTPUT_MODE_YUV = 0;
public static final int OUTPUT_MODE_RGB = 1;
private final long vpxDecContext; private final long vpxDecContext;
/** /**
@@ -57,17 +61,15 @@ import java.nio.ByteBuffer;
* @param encoded The encoded buffer. * @param encoded The encoded buffer.
* @param size Size of the encoded buffer. * @param size Size of the encoded buffer.
* @param outputBuffer The buffer into which the decoded frame should be written. * @param outputBuffer The buffer into which the decoded frame should be written.
* @param outputRgb True if the buffer should be converted to RGB color format. False if YUV
* format should be retained.
* @return 0 on success with a frame to render. 1 on success without a frame to render. * @return 0 on success with a frame to render. 1 on success without a frame to render.
* @throws VpxDecoderException on decode failure. * @throws VpxDecoderException on decode failure.
*/ */
public int decode(ByteBuffer encoded, int size, OutputBuffer outputBuffer, boolean outputRgb) public int decode(ByteBuffer encoded, int size, OutputBuffer outputBuffer)
throws VpxDecoderException { throws VpxDecoderException {
if (vpxDecode(vpxDecContext, encoded, size) != 0) { if (vpxDecode(vpxDecContext, encoded, size) != 0) {
throw new VpxDecoderException("libvpx decode error: " + vpxGetErrorMessage(vpxDecContext)); throw new VpxDecoderException("libvpx decode error: " + vpxGetErrorMessage(vpxDecContext));
} }
return vpxGetFrame(vpxDecContext, outputBuffer, outputRgb); return vpxGetFrame(vpxDecContext, outputBuffer);
} }
/** /**
@@ -92,7 +94,7 @@ import java.nio.ByteBuffer;
private native long vpxInit(); private native long vpxInit();
private native long vpxClose(long context); private native long vpxClose(long context);
private native long vpxDecode(long context, ByteBuffer encoded, int length); private native long vpxDecode(long context, ByteBuffer encoded, int length);
private native int vpxGetFrame(long context, OutputBuffer outputBuffer, boolean outputRgb); private native int vpxGetFrame(long context, OutputBuffer outputBuffer);
private native String vpxGetErrorMessage(long context); private native String vpxGetErrorMessage(long context);
} }

View File

@@ -31,7 +31,6 @@ import java.util.LinkedList;
private static final int NUM_BUFFERS = 16; private static final int NUM_BUFFERS = 16;
private final Object lock; private final Object lock;
private final boolean outputRgb;
private final LinkedList<InputBuffer> queuedInputBuffers; private final LinkedList<InputBuffer> queuedInputBuffers;
private final LinkedList<OutputBuffer> queuedOutputBuffers; private final LinkedList<OutputBuffer> queuedOutputBuffers;
@@ -42,16 +41,17 @@ import java.util.LinkedList;
private boolean flushDecodedOutputBuffer; private boolean flushDecodedOutputBuffer;
private boolean released; private boolean released;
private int outputMode;
private VpxDecoderException decoderException; private VpxDecoderException decoderException;
/** /**
* @param outputRgb True if the decoded output is in RGB color format. False if it is in YUV * @param outputMode One of OUTPUT_MODE_* constants from {@link VpxDecoder}
* color format. * depending on the desired output mode.
*/ */
public VpxDecoderWrapper(boolean outputRgb) { public VpxDecoderWrapper(int outputMode) {
lock = new Object(); lock = new Object();
this.outputRgb = outputRgb; this.outputMode = outputMode;
queuedInputBuffers = new LinkedList<>(); queuedInputBuffers = new LinkedList<>();
queuedOutputBuffers = new LinkedList<>(); queuedOutputBuffers = new LinkedList<>();
availableInputBuffers = new InputBuffer[NUM_BUFFERS]; availableInputBuffers = new InputBuffer[NUM_BUFFERS];
@@ -64,6 +64,10 @@ import java.util.LinkedList;
} }
} }
/**
 * Sets the output mode stamped onto subsequently dequeued output buffers.
 *
 * @param outputMode One of the {@code OUTPUT_MODE_*} constants declared in {@code VpxDecoder}.
 */
// NOTE(review): this field is written from the renderer thread and read by the decode loop
// without holding the wrapper's lock — confirm that this visibility/ordering is acceptable.
public void setOutputMode(int outputMode) {
this.outputMode = outputMode;
}
public InputBuffer getInputBuffer() throws VpxDecoderException { public InputBuffer getInputBuffer() throws VpxDecoderException {
synchronized (lock) { synchronized (lock) {
maybeThrowDecoderError(); maybeThrowDecoderError();
@@ -192,8 +196,9 @@ import java.util.LinkedList;
SampleHolder sampleHolder = inputBuffer.sampleHolder; SampleHolder sampleHolder = inputBuffer.sampleHolder;
outputBuffer.timestampUs = sampleHolder.timeUs; outputBuffer.timestampUs = sampleHolder.timeUs;
outputBuffer.flags = 0; outputBuffer.flags = 0;
outputBuffer.mode = outputMode;
sampleHolder.data.position(sampleHolder.data.position() - sampleHolder.size); sampleHolder.data.position(sampleHolder.data.position() - sampleHolder.size);
decodeResult = decoder.decode(sampleHolder.data, sampleHolder.size, outputBuffer, outputRgb); decodeResult = decoder.decode(sampleHolder.data, sampleHolder.size, outputBuffer);
} }
synchronized (lock) { synchronized (lock) {
@@ -242,6 +247,7 @@ import java.util.LinkedList;
public int flags; public int flags;
public ByteBuffer[] yuvPlanes; public ByteBuffer[] yuvPlanes;
public int[] yuvStrides; public int[] yuvStrides;
public int mode;
/** /**
* This method is called from C++ through JNI after decoding is done. It will resize the * This method is called from C++ through JNI after decoding is done. It will resize the

View File

@@ -48,6 +48,7 @@
static jmethodID initForRgbFrame; static jmethodID initForRgbFrame;
static jmethodID initForYuvFrame; static jmethodID initForYuvFrame;
static jfieldID dataField; static jfieldID dataField;
static jfieldID outputModeField;
jint JNI_OnLoad(JavaVM* vm, void* reserved) { jint JNI_OnLoad(JavaVM* vm, void* reserved) {
JNIEnv* env; JNIEnv* env;
@@ -75,6 +76,7 @@ FUNC(jlong, vpxInit) {
"(II)V"); "(II)V");
dataField = env->GetFieldID(outputBufferClass, "data", dataField = env->GetFieldID(outputBufferClass, "data",
"Ljava/nio/ByteBuffer;"); "Ljava/nio/ByteBuffer;");
outputModeField = env->GetFieldID(outputBufferClass, "mode", "I");
return reinterpret_cast<intptr_t>(context); return reinterpret_cast<intptr_t>(context);
} }
@@ -99,7 +101,7 @@ FUNC(jlong, vpxClose, jlong jContext) {
return 0; return 0;
} }
FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer, jboolean isRGB) { FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer) {
vpx_codec_ctx_t* const context = reinterpret_cast<vpx_codec_ctx_t*>(jContext); vpx_codec_ctx_t* const context = reinterpret_cast<vpx_codec_ctx_t*>(jContext);
vpx_codec_iter_t iter = NULL; vpx_codec_iter_t iter = NULL;
const vpx_image_t* const img = vpx_codec_get_frame(context, &iter); const vpx_image_t* const img = vpx_codec_get_frame(context, &iter);
@@ -108,7 +110,11 @@ FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer, jboolean isRGB) {
return 1; return 1;
} }
if (isRGB == JNI_TRUE) { const int kOutputModeYuv = 0;
const int kOutputModeRgb = 1;
int outputMode = env->GetIntField(jOutputBuffer, outputModeField);
if (outputMode == kOutputModeRgb) {
// resize buffer if required. // resize buffer if required.
env->CallVoidMethod(jOutputBuffer, initForRgbFrame, img->d_w, img->d_h); env->CallVoidMethod(jOutputBuffer, initForRgbFrame, img->d_w, img->d_h);
@@ -121,7 +127,7 @@ FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer, jboolean isRGB) {
img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V], img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
dst, img->d_w * 2, img->d_w, img->d_h); dst, img->d_w * 2, img->d_w, img->d_h);
} else { } else if (outputMode == kOutputModeYuv) {
// resize buffer if required. // resize buffer if required.
env->CallVoidMethod(jOutputBuffer, initForYuvFrame, img->d_w, img->d_h, env->CallVoidMethod(jOutputBuffer, initForYuvFrame, img->d_w, img->d_h,
img->stride[VPX_PLANE_Y], img->stride[VPX_PLANE_U]); img->stride[VPX_PLANE_Y], img->stride[VPX_PLANE_U]);