Make output mode a tri-state flag instead of binary
This CL fixes the case when a surface is attached *after* playback has started.
This commit is contained in:
parent 61fb0c558e
commit dc94e44b1f
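In practical terms, the renderer no longer needs to know its output target when the decoder is created: the mode starts as OUTPUT_MODE_UNKNOWN and is pushed to the already-running decoder once a Surface (or VpxVideoSurfaceView) arrives. The snippet below is a minimal caller-side sketch of that late-attach case. It assumes the ExoPlayer 1.x sendMessage API and the vp9 extension package path; the class name and the player/videoRenderer parameters are hypothetical stand-ins for objects created during normal player setup.

import android.view.Surface;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.ext.vp9.LibvpxVideoTrackRenderer;

// Sketch only: attach a rendering surface after playback has already started.
final class LateSurfaceAttachExample {
  static void onSurfaceAvailable(ExoPlayer player, LibvpxVideoTrackRenderer videoRenderer,
      Surface surface) {
    // Before this CL the renderer held a boolean outputRgb that was fixed when the
    // decoder was instantiated, so a surface supplied mid-playback had no effect.
    // With the tri-state mode, the message handler calls setSurface(), which sets
    // outputMode to OUTPUT_MODE_RGB and forwards it to the live decoder via
    // decoder.setOutputMode(outputMode).
    player.sendMessage(videoRenderer, LibvpxVideoTrackRenderer.MSG_SET_SURFACE, surface);
  }
}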
LibvpxVideoTrackRenderer.java

@@ -108,7 +108,7 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
   private boolean renderedFirstFrame;
   private Surface surface;
   private VpxVideoSurfaceView vpxVideoSurfaceView;
-  private boolean outputRgb;
+  private int outputMode;

   private boolean inputStreamEnded;
   private boolean outputStreamEnded;
@@ -148,6 +148,7 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
     previousWidth = -1;
     previousHeight = -1;
     formatHolder = new MediaFormatHolder();
+    outputMode = VpxDecoder.OUTPUT_MODE_UNKNOWN;
   }

   /**
@@ -184,9 +185,8 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
     }

     // If we don't have a decoder yet, we need to instantiate one.
-    // TODO: Add support for dynamic switching between one type of surface to another.
     if (decoder == null) {
-      decoder = new VpxDecoderWrapper(outputRgb);
+      decoder = new VpxDecoderWrapper(outputMode);
       decoder.start();
     }

@@ -260,15 +260,15 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
   private void renderBuffer() throws VpxDecoderException {
     codecCounters.renderedOutputBufferCount++;
     notifyIfVideoSizeChanged(outputBuffer);
-    if (outputRgb) {
+    if (outputBuffer.mode == VpxDecoder.OUTPUT_MODE_RGB && surface != null) {
       renderRgbFrame(outputBuffer, scaleToFit);
-    } else {
+      if (!drawnToSurface) {
+        drawnToSurface = true;
+        notifyDrawnToSurface(surface);
+      }
+    } else if (outputBuffer.mode == VpxDecoder.OUTPUT_MODE_YUV && vpxVideoSurfaceView != null) {
       vpxVideoSurfaceView.renderFrame(outputBuffer);
     }
-    if (!drawnToSurface) {
-      drawnToSurface = true;
-      notifyDrawnToSurface(surface);
-    }
     releaseOutputBuffer();
   }

@@ -416,18 +416,40 @@ public final class LibvpxVideoTrackRenderer extends SampleSourceTrackRenderer {
   @Override
   public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
     if (messageType == MSG_SET_SURFACE) {
-      surface = (Surface) message;
-      vpxVideoSurfaceView = null;
-      outputRgb = true;
+      setSurface((Surface) message);
     } else if (messageType == MSG_SET_VPX_SURFACE_VIEW) {
-      vpxVideoSurfaceView = (VpxVideoSurfaceView) message;
-      surface = null;
-      outputRgb = false;
+      setVpxVideoSurfaceView((VpxVideoSurfaceView) message);
     } else {
       super.handleMessage(messageType, message);
     }
   }

+  private void setSurface(Surface surface) {
+    if (this.surface == surface) {
+      return;
+    }
+    this.surface = surface;
+    vpxVideoSurfaceView = null;
+    outputMode = (surface != null) ? VpxDecoder.OUTPUT_MODE_RGB : VpxDecoder.OUTPUT_MODE_UNKNOWN;
+    if (decoder != null) {
+      decoder.setOutputMode(outputMode);
+    }
+    drawnToSurface = false;
+  }
+
+  private void setVpxVideoSurfaceView(VpxVideoSurfaceView vpxVideoSurfaceView) {
+    if (this.vpxVideoSurfaceView == vpxVideoSurfaceView) {
+      return;
+    }
+    this.vpxVideoSurfaceView = vpxVideoSurfaceView;
+    surface = null;
+    outputMode =
+        (vpxVideoSurfaceView != null) ? VpxDecoder.OUTPUT_MODE_YUV : VpxDecoder.OUTPUT_MODE_UNKNOWN;
+    if (decoder != null) {
+      decoder.setOutputMode(outputMode);
+    }
+  }
+
   private void notifyIfVideoSizeChanged(final OutputBuffer outputBuffer) {
     if (previousWidth == -1 || previousHeight == -1
         || previousWidth != outputBuffer.width || previousHeight != outputBuffer.height) {
VpxDecoder.java

@@ -37,6 +37,10 @@ import java.nio.ByteBuffer;
     IS_AVAILABLE = isAvailable;
   }

+  public static final int OUTPUT_MODE_UNKNOWN = -1;
+  public static final int OUTPUT_MODE_YUV = 0;
+  public static final int OUTPUT_MODE_RGB = 1;
+
   private final long vpxDecContext;

   /**
@@ -57,17 +61,15 @@ import java.nio.ByteBuffer;
    * @param encoded The encoded buffer.
    * @param size Size of the encoded buffer.
    * @param outputBuffer The buffer into which the decoded frame should be written.
-   * @param outputRgb True if the buffer should be converted to RGB color format. False if YUV
-   *     format should be retained.
    * @return 0 on success with a frame to render. 1 on success without a frame to render.
    * @throws VpxDecoderException on decode failure.
    */
-  public int decode(ByteBuffer encoded, int size, OutputBuffer outputBuffer, boolean outputRgb)
+  public int decode(ByteBuffer encoded, int size, OutputBuffer outputBuffer)
       throws VpxDecoderException {
     if (vpxDecode(vpxDecContext, encoded, size) != 0) {
       throw new VpxDecoderException("libvpx decode error: " + vpxGetErrorMessage(vpxDecContext));
     }
-    return vpxGetFrame(vpxDecContext, outputBuffer, outputRgb);
+    return vpxGetFrame(vpxDecContext, outputBuffer);
   }

   /**
@@ -92,7 +94,7 @@ import java.nio.ByteBuffer;
   private native long vpxInit();
   private native long vpxClose(long context);
   private native long vpxDecode(long context, ByteBuffer encoded, int length);
-  private native int vpxGetFrame(long context, OutputBuffer outputBuffer, boolean outputRgb);
+  private native int vpxGetFrame(long context, OutputBuffer outputBuffer);
   private native String vpxGetErrorMessage(long context);

 }
VpxDecoderWrapper.java

@@ -31,7 +31,6 @@ import java.util.LinkedList;
   private static final int NUM_BUFFERS = 16;

   private final Object lock;
-  private final boolean outputRgb;

   private final LinkedList<InputBuffer> queuedInputBuffers;
   private final LinkedList<OutputBuffer> queuedOutputBuffers;
@@ -42,16 +41,17 @@ import java.util.LinkedList;

   private boolean flushDecodedOutputBuffer;
   private boolean released;
+  private int outputMode;

   private VpxDecoderException decoderException;

   /**
-   * @param outputRgb True if the decoded output is in RGB color format. False if it is in YUV
-   *     color format.
+   * @param outputMode One of OUTPUT_MODE_* constants from {@link VpxDecoderWrapper}
+   *     depending on the desired output mode.
    */
-  public VpxDecoderWrapper(boolean outputRgb) {
+  public VpxDecoderWrapper(int outputMode) {
     lock = new Object();
-    this.outputRgb = outputRgb;
+    this.outputMode = outputMode;
     queuedInputBuffers = new LinkedList<>();
     queuedOutputBuffers = new LinkedList<>();
     availableInputBuffers = new InputBuffer[NUM_BUFFERS];
@@ -64,6 +64,10 @@ import java.util.LinkedList;
     }
   }

+  public void setOutputMode(int outputMode) {
+    this.outputMode = outputMode;
+  }
+
   public InputBuffer getInputBuffer() throws VpxDecoderException {
     synchronized (lock) {
       maybeThrowDecoderError();
@@ -192,8 +196,9 @@ import java.util.LinkedList;
       SampleHolder sampleHolder = inputBuffer.sampleHolder;
       outputBuffer.timestampUs = sampleHolder.timeUs;
       outputBuffer.flags = 0;
+      outputBuffer.mode = outputMode;
       sampleHolder.data.position(sampleHolder.data.position() - sampleHolder.size);
-      decodeResult = decoder.decode(sampleHolder.data, sampleHolder.size, outputBuffer, outputRgb);
+      decodeResult = decoder.decode(sampleHolder.data, sampleHolder.size, outputBuffer);
     }

     synchronized (lock) {
@@ -242,6 +247,7 @@ import java.util.LinkedList;
     public int flags;
     public ByteBuffer[] yuvPlanes;
     public int[] yuvStrides;
+    public int mode;

     /**
      * This method is called from C++ through JNI after decoding is done. It will resize the
JNI implementation (C++)

@@ -48,6 +48,7 @@
 static jmethodID initForRgbFrame;
 static jmethodID initForYuvFrame;
 static jfieldID dataField;
+static jfieldID outputModeField;

 jint JNI_OnLoad(JavaVM* vm, void* reserved) {
   JNIEnv* env;
@@ -75,6 +76,7 @@ FUNC(jlong, vpxInit) {
                                       "(II)V");
   dataField = env->GetFieldID(outputBufferClass, "data",
                               "Ljava/nio/ByteBuffer;");
+  outputModeField = env->GetFieldID(outputBufferClass, "mode", "I");

   return reinterpret_cast<intptr_t>(context);
 }
@@ -99,7 +101,7 @@ FUNC(jlong, vpxClose, jlong jContext) {
   return 0;
 }

-FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer, jboolean isRGB) {
+FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer) {
   vpx_codec_ctx_t* const context = reinterpret_cast<vpx_codec_ctx_t*>(jContext);
   vpx_codec_iter_t iter = NULL;
   const vpx_image_t* const img = vpx_codec_get_frame(context, &iter);
@@ -108,7 +110,11 @@ FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer, jboolean isRGB) {
     return 1;
   }

-  if (isRGB == JNI_TRUE) {
+  const int kOutputModeYuv = 0;
+  const int kOutputModeRgb = 1;
+
+  int outputMode = env->GetIntField(jOutputBuffer, outputModeField);
+  if (outputMode == kOutputModeRgb) {
     // resize buffer if required.
     env->CallVoidMethod(jOutputBuffer, initForRgbFrame, img->d_w, img->d_h);

@@ -121,7 +127,7 @@ FUNC(jint, vpxGetFrame, jlong jContext, jobject jOutputBuffer) {
                        img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
                        img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
                        dst, img->d_w * 2, img->d_w, img->d_h);
-  } else {
+  } else if (outputMode == kOutputModeYuv) {
     // resize buffer if required.
     env->CallVoidMethod(jOutputBuffer, initForYuvFrame, img->d_w, img->d_h,
                         img->stride[VPX_PLANE_Y], img->stride[VPX_PLANE_U]);