Add monoscopic 360 surface type to PlayerView

Using this surface type it's possible to play 360 videos in a non-VR Activity that is
affected by phone orientation and touch input.

RELNOTES=true

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=205720776
eguven 2018-07-23 14:05:27 -07:00 committed by Oliver Woodman
parent 01b69854ff
commit 9c337c8806
13 changed files with 1430 additions and 12 deletions
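
As a quick orientation, the following is a minimal usage sketch (not taken from this commit) of how an app adopts the new surface type: the layout declares a PlayerView with app:surface_type="spherical_view" (the enum value added to attrs.xml below), and the host Activity forwards its lifecycle to the new PlayerView.onResume()/onPause() methods, mirroring the demo PlayerActivity changes below. The layout resource, view id, and class names in the sketch are placeholders.

// Usage sketch only; R.layout.spherical_player_activity and R.id.player_view are
// hypothetical resources whose PlayerView sets app:surface_type="spherical_view".
import android.app.Activity;
import android.os.Bundle;
import com.google.android.exoplayer2.ui.PlayerView;

public class Spherical360Activity extends Activity {

  private PlayerView playerView;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.spherical_player_activity);
    playerView = findViewById(R.id.player_view);
    // Creating the player and calling playerView.setPlayer(player) is omitted here.
  }

  @Override
  public void onStart() {
    super.onStart();
    // Resume the spherical view's orientation sensor and GL thread when visible.
    playerView.onResume();
  }

  @Override
  public void onStop() {
    playerView.onPause();
    super.onStop();
  }
}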

View File

@ -65,6 +65,7 @@
* Allow setting the `Looper`, which is used to access the player, in
`ExoPlayerFactory` ([#4278](https://github.com/google/ExoPlayer/issues/4278)).
* Use default Deserializers if none given to DownloadManager.
* Add monoscopic 360 surface type to PlayerView.
* Deprecate `Player.DefaultEventListener` as selective listener overrides can
be directly made with the `Player.EventListener` interface.
* Deprecate `DefaultAnalyticsListener` as selective listener overrides can be

View File

@ -191,6 +191,9 @@ public class PlayerActivity extends Activity
super.onStart();
if (Util.SDK_INT > 23) {
initializePlayer();
if (playerView != null) {
playerView.onResume();
}
}
}
@ -199,6 +202,9 @@ public class PlayerActivity extends Activity
super.onResume();
if (Util.SDK_INT <= 23 || player == null) {
initializePlayer();
if (playerView != null) {
playerView.onResume();
}
}
}
@ -206,6 +212,9 @@ public class PlayerActivity extends Activity
public void onPause() {
super.onPause();
if (Util.SDK_INT <= 23) {
if (playerView != null) {
playerView.onPause();
}
releasePlayer();
}
}
@ -214,6 +223,9 @@ public class PlayerActivity extends Activity
public void onStop() {
super.onStop();
if (Util.SDK_INT > 23) {
if (playerView != null) {
playerView.onPause();
}
releasePlayer();
}
}

View File

@ -80,6 +80,9 @@ public final class C {
/** The number of bits per byte. */
public static final int BITS_PER_BYTE = 8;
/** The number of bytes per float. */
public static final int BYTES_PER_FLOAT = 4;
/**
* The name of the ASCII charset.
*/

View File

@ -51,6 +51,9 @@ public final class ExoPlayerLibraryInfo {
*/
public static final boolean ASSERTIONS_ENABLED = true;
/** Whether an exception should be thrown in case of an OpenGL error. */
public static final boolean GL_ASSERTIONS_ENABLED = false;
/**
* Whether the library was compiled with {@link com.google.android.exoplayer2.util.TraceUtil}
* trace enabled.

View File

@ -42,6 +42,7 @@ dependencies {
implementation 'com.android.support:support-media-compat:' + supportLibraryVersion
implementation 'com.android.support:support-annotations:' + supportLibraryVersion
compileOnly 'org.checkerframework:checker-qual:' + checkerframeworkVersion
testImplementation project(modulePrefix + 'testutils-robolectric')
}
ext {

View File

@ -17,6 +17,7 @@ package com.google.android.exoplayer2.ui;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
@ -30,6 +31,7 @@ import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
@ -44,6 +46,7 @@ import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.PlaybackPreparer;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Player.DiscontinuityReason;
import com.google.android.exoplayer2.Player.VideoComponent;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.id3.ApicFrame;
import com.google.android.exoplayer2.source.TrackGroupArray;
@ -52,6 +55,7 @@ import com.google.android.exoplayer2.text.TextOutput;
import com.google.android.exoplayer2.trackselection.TrackSelection;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.ui.AspectRatioFrameLayout.ResizeMode;
import com.google.android.exoplayer2.ui.spherical.SphericalSurfaceView;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.ErrorMessageProvider;
import com.google.android.exoplayer2.util.RepeatModeUtil;
@ -118,11 +122,11 @@ import java.util.List;
* <li>Default: {@code fit}
* </ul>
* <li><b>{@code surface_type}</b> - The type of surface view used for video playbacks. Valid
* values are {@code surface_view}, {@code texture_view} and {@code none}. Using {@code none}
* is recommended for audio only applications, since creating the surface can be expensive.
* Using {@code surface_view} is recommended for video applications. Note, TextureView can
* only be used in a hardware accelerated window. When rendered in software, TextureView will
* draw nothing.
* values are {@code surface_view}, {@code texture_view}, {@code spherical_view} and {@code
* none}. Using {@code none} is recommended for audio only applications, since creating the
* surface can be expensive. Using {@code surface_view} is recommended for video applications.
* Note, TextureView can only be used in a hardware accelerated window. When rendered in
* software, TextureView will draw nothing.
* <ul>
* <li>Corresponding method: None
* <li>Default: {@code surface_view}
@ -231,6 +235,7 @@ public class PlayerView extends FrameLayout {
private static final int SURFACE_TYPE_NONE = 0;
private static final int SURFACE_TYPE_SURFACE_VIEW = 1;
private static final int SURFACE_TYPE_TEXTURE_VIEW = 2;
private static final int SURFACE_TYPE_MONO360_VIEW = 3;
private final AspectRatioFrameLayout contentFrame;
private final View shutterView;
@ -351,10 +356,20 @@ public class PlayerView extends FrameLayout {
ViewGroup.LayoutParams params =
new ViewGroup.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
surfaceView =
surfaceType == SURFACE_TYPE_TEXTURE_VIEW
? new TextureView(context)
: new SurfaceView(context);
switch (surfaceType) {
case SURFACE_TYPE_TEXTURE_VIEW:
surfaceView = new TextureView(context);
break;
case SURFACE_TYPE_MONO360_VIEW:
Assertions.checkState(Util.SDK_INT >= 15);
SphericalSurfaceView sphericalSurfaceView = new SphericalSurfaceView(context);
sphericalSurfaceView.setSurfaceListener(componentListener);
surfaceView = sphericalSurfaceView;
break;
default:
surfaceView = new SurfaceView(context);
break;
}
surfaceView.setLayoutParams(params);
contentFrame.addView(surfaceView, 0);
} else {
@ -469,6 +484,8 @@ public class PlayerView extends FrameLayout {
oldVideoComponent.removeVideoListener(componentListener);
if (surfaceView instanceof TextureView) {
oldVideoComponent.clearVideoTextureView((TextureView) surfaceView);
} else if (surfaceView instanceof SphericalSurfaceView) {
oldVideoComponent.clearVideoSurface(((SphericalSurfaceView) surfaceView).getSurface());
} else if (surfaceView instanceof SurfaceView) {
oldVideoComponent.clearVideoSurfaceView((SurfaceView) surfaceView);
}
@ -493,6 +510,8 @@ public class PlayerView extends FrameLayout {
if (newVideoComponent != null) {
if (surfaceView instanceof TextureView) {
newVideoComponent.setVideoTextureView((TextureView) surfaceView);
} else if (surfaceView instanceof SphericalSurfaceView) {
newVideoComponent.setVideoSurface(((SphericalSurfaceView) surfaceView).getSurface());
} else if (surfaceView instanceof SurfaceView) {
newVideoComponent.setVideoSurfaceView((SurfaceView) surfaceView);
}
@ -636,7 +655,7 @@ public class PlayerView extends FrameLayout {
* Sets whether a buffering spinner is displayed when the player is in the buffering state. The
* buffering spinner is not displayed by default.
*
* @param showBuffering Whether the buffering icon is displayer
* @param showBuffering Whether the buffering icon is displayed
*/
public void setShowBuffering(boolean showBuffering) {
if (this.showBuffering != showBuffering) {
@ -913,10 +932,12 @@ public class PlayerView extends FrameLayout {
* <li>{@link SurfaceView} by default, or if the {@code surface_type} attribute is set to {@code
* surface_view}.
* <li>{@link TextureView} if {@code surface_type} is {@code texture_view}.
* <li>{@link SphericalSurfaceView} if {@code surface_type} is {@code spherical_view}.
* <li>{@code null} if {@code surface_type} is {@code none}.
* </ul>
*
* @return The {@link SurfaceView}, {@link TextureView} or {@code null}.
* @return The {@link SurfaceView}, {@link TextureView}, {@link SphericalSurfaceView} or {@code
* null}.
*/
public View getVideoSurfaceView() {
return surfaceView;
@ -965,6 +986,32 @@ public class PlayerView extends FrameLayout {
return true;
}
/**
* Should be called when the player is visible to the user and if {@code surface_type} is {@code
* spherical_view}. It is the counterpart to {@link #onPause()}.
*
* <p>This method should typically be called in {@link Activity#onStart()} (or {@link
* Activity#onResume()} for API version <= 23).
*/
public void onResume() {
if (surfaceView instanceof SphericalSurfaceView) {
((SphericalSurfaceView) surfaceView).onResume();
}
}
/**
* Should be called when the player is no longer visible to the user and if {@code surface_type}
* is {@code spherical_view}. It is the counterpart to {@link #onResume()}.
*
* <p>This method should typically be called in {@link Activity#onStop()} (or {@link
* Activity#onPause()} for API version <= 23).
*/
public void onPause() {
if (surfaceView instanceof SphericalSurfaceView) {
((SphericalSurfaceView) surfaceView).onPause();
}
}
/** Shows the playback controls, but only if forced or shown indefinitely. */
private void maybeShowController(boolean isForced) {
if (isPlayingAd() && controllerHideDuringAds) {
@ -1180,7 +1227,11 @@ public class PlayerView extends FrameLayout {
}
private final class ComponentListener
implements Player.EventListener, TextOutput, VideoListener, OnLayoutChangeListener {
implements Player.EventListener,
TextOutput,
VideoListener,
OnLayoutChangeListener,
SphericalSurfaceView.SurfaceListener {
// TextOutput implementation
@ -1219,6 +1270,8 @@ public class PlayerView extends FrameLayout {
surfaceView.addOnLayoutChangeListener(this);
}
applyTextureViewRotation((TextureView) surfaceView, textureViewRotation);
} else if (surfaceView instanceof SphericalSurfaceView) {
videoAspectRatio = 0;
}
contentFrame.setAspectRatio(videoAspectRatio);
@ -1271,5 +1324,17 @@ public class PlayerView extends FrameLayout {
int oldBottom) {
applyTextureViewRotation((TextureView) view, textureViewRotation);
}
// SphericalSurfaceView.SurfaceListener implementation
@Override
public void surfaceChanged(@Nullable Surface surface) {
if (player != null) {
VideoComponent videoComponent = player.getVideoComponent();
if (videoComponent != null) {
videoComponent.setVideoSurface(surface);
}
}
}
}
}

View File

@ -0,0 +1,303 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import static com.google.android.exoplayer2.ui.spherical.Utils.checkGlError;
import android.annotation.TargetApi;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import com.google.android.exoplayer2.C;
import java.nio.FloatBuffer;
/**
* Utility class to generate & render spherical meshes for video or images. Use the static creation
* methods to construct the Mesh's data. Then call {@link #init()} on the GL thread once a GL
* context is available, and use {@link #draw(int, float[], int)} to render it.
*/
@TargetApi(15)
/*package*/ final class Mesh {
/** Defines the constants identifying the current eye type. */
public interface EyeType {
/** Single eye in monocular rendering. */
int MONOCULAR = 0;
/** The left eye in stereo rendering. */
int LEFT = 1;
/** The right eye in stereo rendering. */
int RIGHT = 2;
}
/** Standard media where a single camera frame takes up the entire media frame. */
public static final int MEDIA_MONOSCOPIC = 0;
/**
* Stereo media where the left & right halves of the frame are rendered for the left & right eyes,
* respectively. If the stereo media is rendered in a non-VR display, only the left half is used.
*/
public static final int MEDIA_STEREO_LEFT_RIGHT = 1;
/**
* Stereo media where the top & bottom halves of the frame are rendered for the left & right eyes,
* respectively. If the stereo media is rendered in a non-VR display, only the top half is used.
*/
public static final int MEDIA_STEREO_TOP_BOTTOM = 2;
// Basic vertex & fragment shaders to render a mesh with 3D position & 2D texture data.
private static final String[] VERTEX_SHADER_CODE =
new String[] {
"uniform mat4 uMvpMatrix;",
"attribute vec4 aPosition;",
"attribute vec2 aTexCoords;",
"varying vec2 vTexCoords;",
// Standard transformation.
"void main() {",
" gl_Position = uMvpMatrix * aPosition;",
" vTexCoords = aTexCoords;",
"}"
};
private static final String[] FRAGMENT_SHADER_CODE =
new String[] {
// This is required since the texture data is GL_TEXTURE_EXTERNAL_OES.
"#extension GL_OES_EGL_image_external : require",
"precision mediump float;",
// Standard texture rendering shader.
"uniform samplerExternalOES uTexture;",
"varying vec2 vTexCoords;",
"void main() {",
" gl_FragColor = texture2D(uTexture, vTexCoords);",
"}"
};
// Constants related to vertex data.
private static final int POSITION_COORDS_PER_VERTEX = 3; // X, Y, Z.
// The vertex contains texture coordinates for both the left & right eyes. If the scene is
// rendered in VR, the appropriate part of the vertex will be selected at runtime. For a mono
// scene, only the left eye's UV coordinates are used.
// For mono media, the UV coordinates are duplicated in each. For stereo media, the UV coords
// point to the appropriate part of the source media.
private static final int TEXTURE_COORDS_PER_VERTEX = 2 * 2;
private static final int COORDS_PER_VERTEX =
POSITION_COORDS_PER_VERTEX + TEXTURE_COORDS_PER_VERTEX;
// Data is tightly packed. Each vertex is [x, y, z, u_left, v_left, u_right, v_right].
private static final int VERTEX_STRIDE_BYTES = COORDS_PER_VERTEX * C.BYTES_PER_FLOAT;
// Vertices for the mesh with 3D position + left 2D texture UV + right 2D texture UV.
private final int vertexCount;
private final FloatBuffer vertexBuffer;
// Program related GL items. These are only valid if program != 0.
private int program;
private int mvpMatrixHandle;
private int positionHandle;
private int texCoordsHandle;
private int textureHandle;
/**
* Generates a 3D UV sphere for rendering monoscopic or stereoscopic video.
*
* <p>This can be called on any thread. The returned {@link Mesh} isn't valid until {@link
* #init()} is called.
*
* @param radius Size of the sphere. Must be > 0.
* @param latitudes Number of rows that make up the sphere. Must be >= 1.
* @param longitudes Number of columns that make up the sphere. Must be >= 1.
* @param verticalFovDegrees Total latitudinal degrees that are covered by the sphere. Must be in
* (0, 180].
* @param horizontalFovDegrees Total longitudinal degrees that are covered by the sphere. Must be
* in (0, 360].
* @param mediaFormat A MEDIA_* value.
* @return Uninitialized Mesh.
*/
public static Mesh createUvSphere(
float radius,
int latitudes,
int longitudes,
float verticalFovDegrees,
float horizontalFovDegrees,
int mediaFormat) {
return new Mesh(
createUvSphereVertexData(
radius, latitudes, longitudes, verticalFovDegrees, horizontalFovDegrees, mediaFormat));
}
/** Used by static constructors. */
private Mesh(float[] vertexData) {
vertexCount = vertexData.length / COORDS_PER_VERTEX;
vertexBuffer = Utils.createBuffer(vertexData);
}
/** Initializes the GL components. */
/* package */ void init() {
program = Utils.compileProgram(VERTEX_SHADER_CODE, FRAGMENT_SHADER_CODE);
mvpMatrixHandle = GLES20.glGetUniformLocation(program, "uMvpMatrix");
positionHandle = GLES20.glGetAttribLocation(program, "aPosition");
texCoordsHandle = GLES20.glGetAttribLocation(program, "aTexCoords");
textureHandle = GLES20.glGetUniformLocation(program, "uTexture");
}
/**
* Renders the mesh. This must be called on the GL thread.
*
* @param textureId GL_TEXTURE_EXTERNAL_OES used for this mesh.
* @param mvpMatrix The Model View Projection matrix.
* @param eyeType An {@link EyeType} value.
*/
/* package */ void draw(int textureId, float[] mvpMatrix, int eyeType) {
// Configure shader.
GLES20.glUseProgram(program);
checkGlError();
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glEnableVertexAttribArray(texCoordsHandle);
checkGlError();
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
GLES20.glUniform1i(textureHandle, 0);
checkGlError();
// Load position data.
vertexBuffer.position(0);
GLES20.glVertexAttribPointer(
positionHandle,
POSITION_COORDS_PER_VERTEX,
GLES20.GL_FLOAT,
false,
VERTEX_STRIDE_BYTES,
vertexBuffer);
checkGlError();
// Load texture data. EyeType.RIGHT uses the right eye's UV coordinates; other eye types use the left eye's.
int textureOffset =
(eyeType == EyeType.RIGHT) ? POSITION_COORDS_PER_VERTEX + 2 : POSITION_COORDS_PER_VERTEX;
vertexBuffer.position(textureOffset);
GLES20.glVertexAttribPointer(
texCoordsHandle,
TEXTURE_COORDS_PER_VERTEX,
GLES20.GL_FLOAT,
false,
VERTEX_STRIDE_BYTES,
vertexBuffer);
checkGlError();
// Render.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
checkGlError();
GLES20.glDisableVertexAttribArray(positionHandle);
GLES20.glDisableVertexAttribArray(texCoordsHandle);
}
/** Cleans up the GL resources. */
/* package */ void shutdown() {
if (program != 0) {
GLES20.glDeleteProgram(program);
}
}
// @VisibleForTesting
/*package*/ static float[] createUvSphereVertexData(
float radius,
int latitudes,
int longitudes,
float verticalFovDegrees,
float horizontalFovDegrees,
int mediaFormat) {
if (radius <= 0
|| latitudes < 1
|| longitudes < 1
|| verticalFovDegrees <= 0
|| verticalFovDegrees > 180
|| horizontalFovDegrees <= 0
|| horizontalFovDegrees > 360) {
throw new IllegalArgumentException("Invalid parameters for sphere.");
}
// Compute angular size in radians of each UV quad.
float verticalFovRads = (float) Math.toRadians(verticalFovDegrees);
float horizontalFovRads = (float) Math.toRadians(horizontalFovDegrees);
float quadHeightRads = verticalFovRads / latitudes;
float quadWidthRads = horizontalFovRads / longitudes;
// Each latitude strip has 2 * (longitudes + 1) vertices for its quads, plus 2 degenerate vertices.
int vertexCount = (2 * (longitudes + 1) + 2) * latitudes;
// Buffer to return.
float[] vertexData = new float[vertexCount * COORDS_PER_VERTEX];
// Generate the data for the sphere which is a set of triangle strips representing each
// latitude band.
int offset = 0; // Offset into the vertexData array.
// (i, j) represents a quad in the equirectangular sphere.
for (int j = 0; j < latitudes; ++j) { // For each horizontal triangle strip.
// Each latitude band lies between the two phi values. Each vertical edge on a band lies on
// a theta value.
float phiLow = (quadHeightRads * j - verticalFovRads / 2);
float phiHigh = (quadHeightRads * (j + 1) - verticalFovRads / 2);
for (int i = 0; i < longitudes + 1; ++i) { // For each vertical edge in the band.
for (int k = 0; k < 2; ++k) { // For low and high points on an edge.
// For each point, determine its position in polar coordinates.
float phi = (k == 0) ? phiLow : phiHigh;
float theta = quadWidthRads * i + (float) Math.PI - horizontalFovRads / 2;
// Set vertex position data as Cartesian coordinates.
vertexData[offset + 0] = -(float) (radius * Math.sin(theta) * Math.cos(phi));
vertexData[offset + 1] = (float) (radius * Math.sin(phi));
vertexData[offset + 2] = (float) (radius * Math.cos(theta) * Math.cos(phi));
// Set vertex texture.x data.
if (mediaFormat == MEDIA_STEREO_LEFT_RIGHT) {
// For left-right media, each eye's x coordinate points to the left or right half of the
// texture.
vertexData[offset + 3] = (i * quadWidthRads / horizontalFovRads) / 2;
vertexData[offset + 5] = (i * quadWidthRads / horizontalFovRads) / 2 + .5f;
} else {
// For top-bottom or monoscopic media, the eye's x spans the full width of the texture.
vertexData[offset + 3] = i * quadWidthRads / horizontalFovRads;
vertexData[offset + 5] = i * quadWidthRads / horizontalFovRads;
}
// Set vertex texture.y data. The "1 - ..." is due to Canvas vs GL coords.
if (mediaFormat == MEDIA_STEREO_TOP_BOTTOM) {
// For top-bottom media, each eye's y coordinate points to the top or bottom half of the
// texture.
vertexData[offset + 4] = 1 - (((j + k) * quadHeightRads / verticalFovRads) / 2 + .5f);
vertexData[offset + 6] = 1 - ((j + k) * quadHeightRads / verticalFovRads) / 2;
} else {
// For left-right or monoscopic media, the eye's y spans the full height of the texture.
vertexData[offset + 4] = 1 - (j + k) * quadHeightRads / verticalFovRads;
vertexData[offset + 6] = 1 - (j + k) * quadHeightRads / verticalFovRads;
}
offset += COORDS_PER_VERTEX;
// Break up the triangle strip with degenerate vertices by copying first and last points.
if ((i == 0 && k == 0) || (i == longitudes && k == 1)) {
System.arraycopy(
vertexData, offset - COORDS_PER_VERTEX, vertexData, offset, COORDS_PER_VERTEX);
offset += COORDS_PER_VERTEX;
}
}
// Move on to the next vertical edge in the triangle strip.
}
// Move on to the next triangle strip.
}
return vertexData;
}
}
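
The class Javadoc above sets out the intended Mesh lifecycle: build the vertex data (any thread), call init() on the GL thread, then draw() once per frame. A rough sketch of that sequence follows; it is not code from this commit, it would need to live in the same ui.spherical package because Mesh and Utils are package-private, and the view-projection matrix is assumed to be supplied by the caller. The parameter values mirror the defaults used by SphericalSurfaceView.

/** Sketch of the Mesh call sequence described in the class Javadoc. */
final class MeshUsageSketch {

  private Mesh mesh;
  private int textureId;

  /** May run on any thread; the Mesh is not usable until init() runs on the GL thread. */
  void create() {
    mesh =
        Mesh.createUvSphere(
            /* radius= */ 50,
            /* latitudes= */ 36,
            /* longitudes= */ 72,
            /* verticalFovDegrees= */ 180,
            /* horizontalFovDegrees= */ 360,
            Mesh.MEDIA_MONOSCOPIC);
  }

  /** GL thread, with a current context (e.g. GLSurfaceView.Renderer#onSurfaceCreated). */
  void onGlContextReady() {
    textureId = Utils.createExternalTexture();
    mesh.init();
  }

  /** GL thread, once per frame, with the camera's view-projection matrix. */
  void drawFrame(float[] viewProjectionMatrix) {
    mesh.draw(textureId, viewProjectionMatrix, Mesh.EyeType.MONOCULAR);
  }
}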

View File

@ -0,0 +1,94 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import static com.google.android.exoplayer2.ui.spherical.Utils.checkGlError;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.ui.spherical.Mesh.EyeType;
import com.google.android.exoplayer2.util.Assertions;
import java.util.concurrent.atomic.AtomicBoolean;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* Renders a GL Scene.
*
* <p>All methods should be called only on the GL thread unless the GL thread is stopped.
*/
/*package*/ final class SceneRenderer {
private final AtomicBoolean frameAvailable;
private int textureId;
@Nullable private SurfaceTexture surfaceTexture;
@MonotonicNonNull private Mesh mesh;
private boolean meshInitialized;
public SceneRenderer() {
frameAvailable = new AtomicBoolean();
}
/** Initializes the renderer. */
public SurfaceTexture init() {
// Set the background frame color. This is only visible if the display mesh isn't a full sphere.
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
checkGlError();
textureId = Utils.createExternalTexture();
surfaceTexture = new SurfaceTexture(textureId);
surfaceTexture.setOnFrameAvailableListener(surfaceTexture -> frameAvailable.set(true));
return surfaceTexture;
}
/** Sets a {@link Mesh} to be used to display video. */
public void setMesh(Mesh mesh) {
if (this.mesh != null) {
this.mesh.shutdown();
}
this.mesh = mesh;
meshInitialized = false;
}
/**
* Draws the scene with a given eye pose and type.
*
* @param viewProjectionMatrix 16 element GL matrix.
* @param eyeType An {@link EyeType} value.
*/
public void drawFrame(float[] viewProjectionMatrix, int eyeType) {
if (mesh == null) {
return;
}
if (!meshInitialized) {
meshInitialized = true;
mesh.init();
}
// glClear isn't strictly necessary when rendering fully spherical panoramas, but it can improve
// performance on tiled renderers by causing the GPU to discard previous data.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
checkGlError();
if (frameAvailable.compareAndSet(true, false)) {
Assertions.checkNotNull(surfaceTexture).updateTexImage();
checkGlError();
}
mesh.draw(textureId, viewProjectionMatrix, eyeType);
}
}

View File

@ -0,0 +1,492 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.PointF;
import android.graphics.SurfaceTexture;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.AnyThread;
import android.support.annotation.BinderThread;
import android.support.annotation.Nullable;
import android.support.annotation.UiThread;
import android.util.AttributeSet;
import android.view.Display;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.View;
import android.view.WindowManager;
import com.google.android.exoplayer2.ui.spherical.Mesh.EyeType;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
* Renders a GL scene in a non-VR Activity that is affected by phone orientation and touch input.
*
* <p>The two input components are the TYPE_GAME_ROTATION_VECTOR Sensor and a TouchListener. The GL
* renderer combines these two inputs to render a scene with the appropriate camera orientation.
*
* <p>The primary complexity in this class is related to the various rotations. It is important to
* apply the touch and sensor rotations in the correct order or the user's touch manipulations won't
* match what they expect.
*/
@TargetApi(15)
public final class SphericalSurfaceView extends GLSurfaceView {
/**
* This listener can be used to be notified when the {@link Surface} associated with this view is
* changed.
*/
public interface SurfaceListener {
/**
* Invoked when the surface is changed or there isn't one anymore. Any previous surface
* shouldn't be used after this call.
*
* @param surface The new surface or null if there isn't one anymore.
*/
void surfaceChanged(@Nullable Surface surface);
}
// A spherical mesh for video should be large enough that there are no stereo artifacts.
private static final int SPHERE_RADIUS_METERS = 50;
// TODO These should be configured based on the video type. It's assumed 360 video here.
private static final int DEFAULT_SPHERE_HORIZONTAL_DEGREES = 360;
private static final int DEFAULT_SPHERE_VERTICAL_DEGREES = 180;
// The 360 x 180 sphere has 5 degree quads. Increase these if lines in videos look wavy.
private static final int DEFAULT_SPHERE_COLUMNS = 72;
private static final int DEFAULT_SPHERE_ROWS = 36;
// Arbitrary vertical field of view.
private static final int FIELD_OF_VIEW_DEGREES = 90;
private static final float Z_NEAR = .1f;
private static final float Z_FAR = 100;
// Arbitrary touch speed number. This should be tweaked so the scene smoothly follows the
// finger or derived from DisplayMetrics.
private static final float PX_PER_DEGREES = 25;
// Touch input won't change the pitch beyond +/- 45 degrees. This reduces awkward situations
// where the touch-based pitch and gyro-based pitch interact badly near the poles.
private static final float MAX_PITCH_DEGREES = 45;
private static final float UPRIGHT_ROLL = (float) Math.PI;
private final SensorManager sensorManager;
private final @Nullable Sensor orientationSensor;
private final PhoneOrientationListener phoneOrientationListener;
private final Renderer renderer;
private final Handler mainHandler;
private @Nullable SurfaceListener surfaceListener;
private @Nullable SurfaceTexture surfaceTexture;
private @Nullable Surface surface;
public SphericalSurfaceView(Context context) {
this(context, null);
}
public SphericalSurfaceView(Context context, @Nullable AttributeSet attributeSet) {
super(context, attributeSet);
mainHandler = new Handler(Looper.getMainLooper());
// Configure sensors and touch.
sensorManager =
(SensorManager) Assertions.checkNotNull(context.getSystemService(Context.SENSOR_SERVICE));
// TYPE_GAME_ROTATION_VECTOR is the easiest sensor since it handles all the complex math for
// fusion. It's used instead of TYPE_ROTATION_VECTOR since the latter uses the magnetometer on
// devices. When used indoors, the magnetometer can take some time to settle depending on the
// device and amount of metal in the environment.
int type = Util.SDK_INT >= 18 ? Sensor.TYPE_GAME_ROTATION_VECTOR : Sensor.TYPE_ROTATION_VECTOR;
orientationSensor = sensorManager.getDefaultSensor(type);
renderer = new Renderer();
TouchTracker touchTracker = new TouchTracker(renderer);
WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
Display display = Assertions.checkNotNull(windowManager).getDefaultDisplay();
phoneOrientationListener = new PhoneOrientationListener(display, touchTracker, renderer);
setEGLContextClientVersion(2);
setRenderer(renderer);
setOnTouchListener(touchTracker);
Mesh mesh =
Mesh.createUvSphere(
SPHERE_RADIUS_METERS,
DEFAULT_SPHERE_ROWS,
DEFAULT_SPHERE_COLUMNS,
DEFAULT_SPHERE_VERTICAL_DEGREES,
DEFAULT_SPHERE_HORIZONTAL_DEGREES,
Mesh.MEDIA_MONOSCOPIC);
queueEvent(() -> renderer.scene.setMesh(mesh));
}
/** Returns the {@link Surface} associated with this view. */
public @Nullable Surface getSurface() {
return surface;
}
/**
* Sets the {@link SurfaceListener} used to listen to surface events.
*
* @param listener The listener for surface events.
*/
public void setSurfaceListener(@Nullable SurfaceListener listener) {
surfaceListener = listener;
}
@Override
public void onResume() {
super.onResume();
if (orientationSensor != null) {
sensorManager.registerListener(
phoneOrientationListener, orientationSensor, SensorManager.SENSOR_DELAY_FASTEST);
}
}
@Override
public void onPause() {
if (orientationSensor != null) {
sensorManager.unregisterListener(phoneOrientationListener);
}
super.onPause();
}
@Override
protected void onDetachedFromWindow() {
// This call stops the GL thread.
super.onDetachedFromWindow();
// Post to make sure this runs in order with any onSurfaceTextureAvailable calls.
mainHandler.post(
() -> {
if (surface != null) {
if (surfaceListener != null) {
surfaceListener.surfaceChanged(null);
}
releaseSurface(surfaceTexture, surface);
surfaceTexture = null;
surface = null;
}
});
}
// Called on GL thread.
private void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture) {
mainHandler.post(
() -> {
SurfaceTexture oldSurfaceTexture = this.surfaceTexture;
Surface oldSurface = this.surface;
this.surfaceTexture = surfaceTexture;
this.surface = new Surface(surfaceTexture);
if (surfaceListener != null) {
surfaceListener.surfaceChanged(surface);
}
releaseSurface(oldSurfaceTexture, oldSurface);
});
}
private static void releaseSurface(
@Nullable SurfaceTexture oldSurfaceTexture, @Nullable Surface oldSurface) {
if (oldSurfaceTexture != null) {
oldSurfaceTexture.release();
}
if (oldSurface != null) {
oldSurface.release();
}
}
/** Detects sensor events and saves them as a matrix. */
private static class PhoneOrientationListener implements SensorEventListener {
private final float[] phoneInWorldSpaceMatrix = new float[16];
private final float[] remappedPhoneMatrix = new float[16];
private final float[] angles = new float[3];
private final Display display;
private final TouchTracker touchTracker;
private final Renderer renderer;
public PhoneOrientationListener(Display display, TouchTracker touchTracker, Renderer renderer) {
this.display = display;
this.touchTracker = touchTracker;
this.renderer = renderer;
}
@Override
@BinderThread
public void onSensorChanged(SensorEvent event) {
SensorManager.getRotationMatrixFromVector(remappedPhoneMatrix, event.values);
// If we're not in upright portrait mode, remap the axes of the coordinate system according to
// the display rotation.
int xAxis;
int yAxis;
switch (display.getRotation()) {
case Surface.ROTATION_270:
xAxis = SensorManager.AXIS_MINUS_Y;
yAxis = SensorManager.AXIS_X;
break;
case Surface.ROTATION_180:
xAxis = SensorManager.AXIS_MINUS_X;
yAxis = SensorManager.AXIS_MINUS_Y;
break;
case Surface.ROTATION_90:
xAxis = SensorManager.AXIS_Y;
yAxis = SensorManager.AXIS_MINUS_X;
break;
case Surface.ROTATION_0:
default:
xAxis = SensorManager.AXIS_X;
yAxis = SensorManager.AXIS_Y;
break;
}
SensorManager.remapCoordinateSystem(
remappedPhoneMatrix, xAxis, yAxis, phoneInWorldSpaceMatrix);
// Extract the phone's roll and pass it on to touchTracker & renderer. Remapping is required
// since we need the calculated roll of the phone to be independent of the phone's pitch &
// yaw. Any operation that decomposes rotation to Euler angles needs to be performed
// carefully.
SensorManager.remapCoordinateSystem(
phoneInWorldSpaceMatrix,
SensorManager.AXIS_X,
SensorManager.AXIS_MINUS_Z,
remappedPhoneMatrix);
SensorManager.getOrientation(remappedPhoneMatrix, angles);
float roll = angles[2];
touchTracker.setRoll(roll);
// Rotate from Android coordinates to OpenGL coordinates. Android's coordinate system
// assumes Y points North and Z points to the sky. OpenGL has Y pointing up and Z pointing
// toward the user.
Matrix.rotateM(phoneInWorldSpaceMatrix, 0, 90, 1, 0, 0);
renderer.setDeviceOrientation(phoneInWorldSpaceMatrix, roll);
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
}
/**
* Basic touch input system.
*
* <p>Mixing touch input and gyro input results in a complicated UI so this should be used
* carefully. This touch system implements a basic (X, Y) -> (yaw, pitch) transform. This works
* for basic UI but fails in edge cases where the user tries to drag the scene up or down. There is no
* good UX solution for this. The least bad solution is to disable pitch manipulation and only let
* the user adjust yaw. This example tries to limit the awkwardness by restricting pitch
* manipulation to +/- 45 degrees.
*
* <p>It is also important to get the order of operations correct. To match what users expect,
* touch interaction manipulates the scene by rotating the world by the yaw offset and tilting the
* camera by the pitch offset. If the order of operations is incorrect, the sensors & touch
* rotations will have strange interactions. The roll of the phone is also tracked so that the x &
* y are correctly mapped to yaw & pitch no matter how the user holds their phone.
*
* <p>This class doesn't handle any scrolling inertia but Android's
* com.google.vr.sdk.widgets.common.TouchTracker.FlingGestureListener can be used with this code
* for a nicer UI. An even more advanced UI would reproject the user's touch point into 3D and
* drag the Mesh as the user moves their finger. However, that requires quaternion interpolation
* and is beyond the scope of this sample.
*/
// @VisibleForTesting
/*package*/ static class TouchTracker implements OnTouchListener {
// With every touch event, update the accumulated degrees offset by the new pixel amount.
private final PointF previousTouchPointPx = new PointF();
private final PointF accumulatedTouchOffsetDegrees = new PointF();
// The conversion from touch to yaw & pitch requires compensating for device roll. This is set
// on the sensor thread and read on the UI thread.
private volatile float roll;
private final Renderer renderer;
public TouchTracker(Renderer renderer) {
this.renderer = renderer;
roll = UPRIGHT_ROLL;
}
/**
* Converts ACTION_MOVE events to pitch & yaw events while compensating for device roll.
*
* @return true if we handled the event
*/
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
// Initialize drag gesture.
previousTouchPointPx.set(event.getX(), event.getY());
return true;
case MotionEvent.ACTION_MOVE:
// Calculate the touch delta in screen space.
float touchX = (event.getX() - previousTouchPointPx.x) / PX_PER_DEGREES;
float touchY = (event.getY() - previousTouchPointPx.y) / PX_PER_DEGREES;
previousTouchPointPx.set(event.getX(), event.getY());
float r = roll; // Copy volatile state.
float cr = (float) Math.cos(r);
float sr = (float) Math.sin(r);
// To convert from screen space to the 3D space, we need to adjust the drag vector based
// on the roll of the phone. This is standard rotationMatrix(roll) * vector math but has
// an inverted y-axis due to the screen-space coordinates vs GL coordinates.
// Handle yaw.
accumulatedTouchOffsetDegrees.x -= cr * touchX - sr * touchY;
// Handle pitch and limit it to 45 degrees.
accumulatedTouchOffsetDegrees.y += sr * touchX + cr * touchY;
accumulatedTouchOffsetDegrees.y =
Math.max(
-MAX_PITCH_DEGREES, Math.min(MAX_PITCH_DEGREES, accumulatedTouchOffsetDegrees.y));
renderer.setPitchOffset(accumulatedTouchOffsetDegrees.y);
renderer.setYawOffset(accumulatedTouchOffsetDegrees.x);
return true;
default:
return false;
}
}
@BinderThread
public void setRoll(float roll) {
// We compensate for roll by rotating in the opposite direction.
this.roll = -roll;
}
}
/**
* Standard GL Renderer implementation. The notable code is the matrix multiplication in
* onDrawFrame and updatePitchMatrix.
*/
// @VisibleForTesting
/*package*/ class Renderer implements GLSurfaceView.Renderer {
private final SceneRenderer scene;
private final float[] projectionMatrix = new float[16];
// There is no model matrix for this scene so viewProjectionMatrix is used for the mvpMatrix.
private final float[] viewProjectionMatrix = new float[16];
// Device orientation is derived from sensor data. This is accessed in the sensor's thread and
// the GL thread.
private final float[] deviceOrientationMatrix = new float[16];
// Optional pitch and yaw rotations are applied to the sensor orientation. These are accessed on
// the UI, sensor and GL Threads.
private final float[] touchPitchMatrix = new float[16];
private final float[] touchYawMatrix = new float[16];
private float touchPitch;
private float deviceRoll;
// viewMatrix = touchPitch * deviceOrientation * touchYaw.
private final float[] viewMatrix = new float[16];
private final float[] tempMatrix = new float[16];
public Renderer() {
scene = new SceneRenderer();
Matrix.setIdentityM(deviceOrientationMatrix, 0);
Matrix.setIdentityM(touchPitchMatrix, 0);
Matrix.setIdentityM(touchYawMatrix, 0);
deviceRoll = UPRIGHT_ROLL;
}
@Override
public synchronized void onSurfaceCreated(GL10 gl, EGLConfig config) {
onSurfaceTextureAvailable(scene.init());
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
float aspect = (float) width / height;
float fovY = calculateFieldOfViewInYDirection(aspect);
Matrix.perspectiveM(projectionMatrix, 0, fovY, aspect, Z_NEAR, Z_FAR);
}
@Override
public void onDrawFrame(GL10 gl) {
// Combine touch & sensor data.
// Orientation = pitch * sensor * yaw since that is closest to what most users expect the
// behavior to be.
synchronized (this) {
Matrix.multiplyMM(tempMatrix, 0, deviceOrientationMatrix, 0, touchYawMatrix, 0);
Matrix.multiplyMM(viewMatrix, 0, touchPitchMatrix, 0, tempMatrix, 0);
}
Matrix.multiplyMM(viewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0);
scene.drawFrame(viewProjectionMatrix, EyeType.MONOCULAR);
}
/** Adjusts the GL camera's rotation based on device rotation. Runs on the sensor thread. */
@BinderThread
public synchronized void setDeviceOrientation(float[] matrix, float deviceRoll) {
System.arraycopy(matrix, 0, deviceOrientationMatrix, 0, deviceOrientationMatrix.length);
this.deviceRoll = -deviceRoll;
updatePitchMatrix();
}
/**
* Updates the pitch matrix after a physical rotation or touch input. The pitch matrix rotation
* is applied on an axis that is dependent on device rotation so this must be called after
* either touch or sensor update.
*/
@AnyThread
private void updatePitchMatrix() {
// The camera's pitch needs to be rotated along an axis that is parallel to the real world's
// horizon. This is the <1, 0, 0> axis after compensating for the device's roll.
Matrix.setRotateM(
touchPitchMatrix,
0,
-touchPitch,
(float) Math.cos(deviceRoll),
(float) Math.sin(deviceRoll),
0);
}
/** Set the pitch offset matrix. */
@UiThread
public synchronized void setPitchOffset(float pitchDegrees) {
touchPitch = pitchDegrees;
updatePitchMatrix();
}
/** Set the yaw offset matrix. */
@UiThread
public synchronized void setYawOffset(float yawDegrees) {
Matrix.setRotateM(touchYawMatrix, 0, -yawDegrees, 0, 1, 0);
}
private float calculateFieldOfViewInYDirection(float aspect) {
boolean landscapeMode = aspect > 1;
if (landscapeMode) {
double halfFovX = FIELD_OF_VIEW_DEGREES / 2;
double tanY = Math.tan(Math.toRadians(halfFovX)) / aspect;
double halfFovY = Math.toDegrees(Math.atan(tanY));
return (float) (halfFovY * 2);
} else {
return FIELD_OF_VIEW_DEGREES;
}
}
}
}
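
For completeness, a minimal sketch (not from this commit) of wiring SphericalSurfaceView straight to a player through its SurfaceListener; this is what PlayerView's ComponentListener does when surface_type is spherical_view. The player and context are assumed to be supplied by the caller, and the host Activity is still expected to forward onStart/onStop to onResume()/onPause().

// Sketch only; assumes imports of android.content.Context, com.google.android.exoplayer2.Player
// and com.google.android.exoplayer2.ui.spherical.SphericalSurfaceView.
static SphericalSurfaceView createSpherical360View(Context context, Player player) {
  SphericalSurfaceView view = new SphericalSurfaceView(context);
  view.setSurfaceListener(
      surface -> {
        Player.VideoComponent videoComponent = player.getVideoComponent();
        if (videoComponent != null) {
          // surface is null when the view is detached; passing null clears the video output.
          videoComponent.setVideoSurface(surface);
        }
      });
  return view;
}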

View File

@ -0,0 +1,131 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import static android.opengl.GLU.gluErrorString;
import android.annotation.TargetApi;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.text.TextUtils;
import android.util.Log;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
/** GL utility methods. */
/*package*/ final class Utils {
private static final String TAG = "Spherical.Utils";
/** Class only contains static methods. */
private Utils() {}
/**
* If there is an OpenGL error, logs the error and if {@link
* ExoPlayerLibraryInfo#GL_ASSERTIONS_ENABLED} is true throws a {@link RuntimeException}.
*/
public static void checkGlError() {
int error = GLES20.glGetError();
int lastError;
if (error != GLES20.GL_NO_ERROR) {
do {
lastError = error;
Log.e(TAG, "glError " + gluErrorString(lastError));
error = GLES20.glGetError();
} while (error != GLES20.GL_NO_ERROR);
if (ExoPlayerLibraryInfo.GL_ASSERTIONS_ENABLED) {
throw new RuntimeException("glError " + gluErrorString(lastError));
}
}
}
/**
* Builds a GL shader program from vertex & fragment shader code. The vertex and fragment shaders
* are passed as arrays of strings in order to make debugging compilation issues easier.
*
* @param vertexCode GLES20 vertex shader program.
* @param fragmentCode GLES20 fragment shader program.
* @return GLES20 program id.
*/
public static int compileProgram(String[] vertexCode, String[] fragmentCode) {
checkGlError();
// prepare shaders and OpenGL program
int vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vertexShader, TextUtils.join("\n", vertexCode));
GLES20.glCompileShader(vertexShader);
checkGlError();
int fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fragmentShader, TextUtils.join("\n", fragmentCode));
GLES20.glCompileShader(fragmentShader);
checkGlError();
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
// Link and check for errors.
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
String errorMsg = "Unable to link shader program: \n" + GLES20.glGetProgramInfoLog(program);
Log.e(TAG, errorMsg);
if (ExoPlayerLibraryInfo.GL_ASSERTIONS_ENABLED) {
throw new RuntimeException(errorMsg);
}
}
checkGlError();
return program;
}
/** Allocates a FloatBuffer with the given data. */
public static FloatBuffer createBuffer(float[] data) {
ByteBuffer bb = ByteBuffer.allocateDirect(data.length * C.BYTES_PER_FLOAT);
bb.order(ByteOrder.nativeOrder());
FloatBuffer buffer = bb.asFloatBuffer();
buffer.put(data);
buffer.position(0);
return buffer;
}
/**
* Creates a GL_TEXTURE_EXTERNAL_OES with default configuration of GL_LINEAR filtering and
* GL_CLAMP_TO_EDGE wrapping.
*/
@TargetApi(15)
public static int createExternalTexture() {
int[] texId = new int[1];
GLES20.glGenTextures(1, IntBuffer.wrap(texId));
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texId[0]);
GLES20.glTexParameteri(
GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(
GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(
GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(
GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
checkGlError();
return texId[0];
}
}

View File

@ -29,6 +29,7 @@
<enum name="none" value="0"/>
<enum name="surface_view" value="1"/>
<enum name="texture_view" value="2"/>
<enum name="spherical_view" value="3"/>
</attr>
<attr name="show_timeout" format="integer"/>
<attr name="rewind_increment" format="integer"/>

View File

@ -0,0 +1,156 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
/** Tests for {@link Mesh}. */
@RunWith(RobolectricTestRunner.class)
public class MeshTest {
private static final float EPSILON = .00001f;
// This is a copy of Mesh.COORDS_PER_VERTEX which is private.
private static final int COORDS_PER_VERTEX = 7;
// Default 360 sphere.
private static final float RADIUS = 1;
private static final int LATITUDES = 12;
private static final int LONGITUDES = 24;
private static final float VERTICAL_FOV_DEGREES = 180;
private static final float HORIZONTAL_FOV_DEGREES = 360;
@Test
public void testSphericalMesh() throws Exception {
// Only the first param is important in this test.
float[] data =
Mesh.createUvSphereVertexData(
RADIUS,
LATITUDES,
LONGITUDES,
VERTICAL_FOV_DEGREES,
HORIZONTAL_FOV_DEGREES,
Mesh.MEDIA_STEREO_TOP_BOTTOM);
assertThat(data.length).isGreaterThan(LATITUDES * LONGITUDES * COORDS_PER_VERTEX);
assertEquals(0, data.length % COORDS_PER_VERTEX);
for (int i = 0; i < data.length / COORDS_PER_VERTEX; ++i) {
float x = data[i * COORDS_PER_VERTEX + 0];
float y = data[i * COORDS_PER_VERTEX + 1];
float z = data[i * COORDS_PER_VERTEX + 2];
assertEquals(RADIUS, Math.sqrt(x * x + y * y + z * z), EPSILON);
}
}
@Test
public void testMeshTextureCoordinates() throws Exception {
// 360 mono video.
float[] data =
Mesh.createUvSphereVertexData(
RADIUS,
LATITUDES,
LONGITUDES,
VERTICAL_FOV_DEGREES,
HORIZONTAL_FOV_DEGREES,
Mesh.MEDIA_MONOSCOPIC);
// There should be more vertices than quads.
assertThat(data.length).isGreaterThan(LATITUDES * LONGITUDES * COORDS_PER_VERTEX);
assertEquals(0, data.length % COORDS_PER_VERTEX);
for (int i = 0; i < data.length; i += COORDS_PER_VERTEX) {
// For monoscopic meshes, the (3, 4) and (5, 6) tex coords in each vertex should be the same.
assertEquals(data[i + 3], data[i + 5], EPSILON);
assertEquals(data[i + 4], data[i + 6], EPSILON);
}
// Hemispherical stereo where longitudes := latitudes. This is not exactly Wally format, but
// it's close.
data =
Mesh.createUvSphereVertexData(
RADIUS,
LATITUDES,
LATITUDES,
VERTICAL_FOV_DEGREES,
VERTICAL_FOV_DEGREES,
Mesh.MEDIA_STEREO_LEFT_RIGHT);
assertThat(data.length).isGreaterThan(LATITUDES * LATITUDES * COORDS_PER_VERTEX);
assertEquals(0, data.length % COORDS_PER_VERTEX);
for (int i = 0; i < data.length; i += COORDS_PER_VERTEX) {
// U coordinates should be on the left & right halves of the texture.
assertThat(data[i + 3]).isAtMost(.5f);
assertThat(data[i + 5]).isAtLeast(.5f);
// V coordinates should be the same.
assertEquals(data[i + 4], data[i + 6], EPSILON);
}
// Flat stereo.
data =
Mesh.createUvSphereVertexData(
RADIUS,
1,
1, // Single quad.
30,
60, // Approximate "cinematic" screen.
Mesh.MEDIA_STEREO_TOP_BOTTOM);
assertEquals(0, data.length % COORDS_PER_VERTEX);
for (int i = 0; i < data.length; i += COORDS_PER_VERTEX) {
// U coordinates should be the same
assertEquals(data[i + 3], data[i + 5], EPSILON);
// V coordinates should be on the top & bottom halves of the texture.
assertThat(data[i + 4]).isAtMost(.5f);
assertThat(data[i + 6]).isAtLeast(.5f);
}
}
@Test
public void testArgumentValidation() {
checkIllegalArgumentException(0, 1, 1, 1, 1);
checkIllegalArgumentException(1, 0, 1, 1, 1);
checkIllegalArgumentException(1, 1, 0, 1, 1);
checkIllegalArgumentException(1, 1, 1, 0, 1);
checkIllegalArgumentException(1, 1, 1, 181, 1);
checkIllegalArgumentException(1, 1, 1, 1, 0);
checkIllegalArgumentException(1, 1, 1, 1, 361);
}
private void checkIllegalArgumentException(
float radius,
int latitudes,
int longitudes,
float verticalFovDegrees,
float horizontalFovDegrees) {
try {
Mesh.createUvSphereVertexData(
radius,
latitudes,
longitudes,
verticalFovDegrees,
horizontalFovDegrees,
Mesh.MEDIA_MONOSCOPIC);
fail();
} catch (IllegalArgumentException e) {
// Do nothing. Expected.
}
}
}

View File

@ -0,0 +1,156 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import static com.google.common.truth.Truth.assertThat;
import android.view.MotionEvent;
import com.google.android.exoplayer2.ui.spherical.SphericalSurfaceView.TouchTracker;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
/** Tests the interaction between the View's input (TouchTracker) and output (Renderer). */
@RunWith(RobolectricTestRunner.class)
public class SphericalSurfaceViewTouchTrackerTest {
private static final float EPSILON = 0.00001f;
private static final int SWIPE_PX = 100;
private static class MockRenderer extends SphericalSurfaceView.Renderer {
private float yaw;
private float pitch;
public MockRenderer() {
super(null);
}
@Override
public synchronized void setPitchOffset(float pitch) {
this.pitch = pitch;
}
@Override
public synchronized void setYawOffset(float yaw) {
this.yaw = yaw;
}
};
private final MockRenderer mockRenderer = new MockRenderer();
private TouchTracker tracker;
private static void swipe(TouchTracker tracker, float x0, float y0, float x1, float y1) {
tracker.onTouch(null, MotionEvent.obtain(0, 0, MotionEvent.ACTION_DOWN, x0, y0, 0));
tracker.onTouch(null, MotionEvent.obtain(0, 0, MotionEvent.ACTION_MOVE, x1, y1, 0));
tracker.onTouch(null, MotionEvent.obtain(0, 0, MotionEvent.ACTION_UP, x1, y1, 0));
}
@Before
public void setUp() {
tracker = new TouchTracker(mockRenderer);
}
@Test
public void testTap() {
// Tap is a noop.
swipe(tracker, 0, 0, 0, 0);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(0);
}
@Test
public void testBasicYaw() {
swipe(tracker, 0, 0, SWIPE_PX, 0);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-SWIPE_PX / TouchTracker.PX_PER_DEGREES);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(0);
}
@Test
public void testBigYaw() {
swipe(tracker, 0, 0, -10 * SWIPE_PX, 0);
assertThat(mockRenderer.yaw).isEqualTo(10 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(0);
}
@Test
public void testYawUnaffectedByPitch() {
swipe(tracker, 0, 0, 0, SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
swipe(tracker, 0, 0, SWIPE_PX, SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
@Test
public void testBasicPitch() {
swipe(tracker, 0, 0, 0, SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
@Test
public void testPitchClipped() {
// Big reverse pitch should be clipped.
swipe(tracker, 0, 0, 0, -20 * SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
assertThat(mockRenderer.pitch).isEqualTo(-TouchTracker.MAX_PITCH_DEGREES);
// Big forward pitch should be clipped.
swipe(tracker, 0, 0, 0, 50 * SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
assertThat(mockRenderer.pitch).isEqualTo(TouchTracker.MAX_PITCH_DEGREES);
}
@Test
public void testWithRoll90() {
tracker.setRoll((float) Math.toRadians(90));
// Y-axis should now control yaw.
swipe(tracker, 0, 0, 0, 2 * SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-2 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
// X-axis should now control reverse pitch.
swipe(tracker, 0, 0, -3 * SWIPE_PX, 0);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(3 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
@Test
public void testWithRoll180() {
tracker.setRoll((float) Math.toRadians(180));
// X-axis should now control reverse yaw.
swipe(tracker, 0, 0, -2 * SWIPE_PX, 0);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-2 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
// Y-axis should now control reverse pitch.
swipe(tracker, 0, 0, 0, -3 * SWIPE_PX);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(3 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
@Test
public void testWithRoll270() {
tracker.setRoll((float) Math.toRadians(270));
// Y-axis should now control reverse yaw.
swipe(tracker, 0, 0, 0, -2 * SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-2 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
// X-axis should now control pitch.
swipe(tracker, 0, 0, 3 * SWIPE_PX, 0);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(3 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
}