Support VR180 videos

If available parse and use spherical metadata:
https://github.com/google/spatial-media/blob/master/docs/spherical-video-v2-rfc.md

RELNOTES=true

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=209754080
This commit is contained in:
eguven 2018-08-22 06:08:55 -07:00 committed by Oliver Woodman
parent 2b8938533f
commit 02a8964fe2
12 changed files with 982 additions and 346 deletions

View File

@ -91,7 +91,9 @@
* Allow setting the `Looper`, which is used to access the player, in
`ExoPlayerFactory` ([#4278](https://github.com/google/ExoPlayer/issues/4278)).
* Use default Deserializers if none are given to DownloadManager.
* Add monoscopic 360 surface type to PlayerView.
* 360:
* Add monoscopic 360 surface type to PlayerView.
* Support VR180 videos.
* Deprecate `Player.DefaultEventListener` as selective listener overrides can
be directly made with the `Player.EventListener` interface.
* Deprecate `DefaultAnalyticsListener` as selective listener overrides can be

View File

@ -567,6 +567,11 @@
"uri": "https://storage.googleapis.com/exoplayer-test-media-1/360/congo.mp4",
"spherical_stereo_mode": "top_bottom"
},
{
"name": "Sphericalv2 (180 top-bottom stereo)",
"uri": "https://storage.googleapis.com/exoplayer-test-media-1/360/sphericalv2.mp4",
"spherical_stereo_mode": "top_bottom"
},
{
"name": "Iceland (360 top-bottom stereo ts)",
"uri": "https://storage.googleapis.com/exoplayer-test-media-1/360/iceland0.ts",

View File

@ -190,7 +190,7 @@ public class PlayerActivity extends Activity
finish();
return;
}
((SphericalSurfaceView) playerView.getVideoSurfaceView()).setStereoMode(stereoMode);
((SphericalSurfaceView) playerView.getVideoSurfaceView()).setDefaultStereoMode(stereoMode);
}
if (savedInstanceState != null) {

View File

@ -0,0 +1,234 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.video.spherical;
import android.support.annotation.IntDef;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.StereoMode;
import com.google.android.exoplayer2.util.Assertions;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/** The projection mesh used with 360/VR videos. */
public final class Projection {

  /** Enforces allowed (sub) mesh draw modes. */
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({DRAW_MODE_TRIANGLES, DRAW_MODE_TRIANGLES_STRIP, DRAW_MODE_TRIANGLES_FAN})
  public @interface DrawMode {}

  /** Triangle draw mode. */
  public static final int DRAW_MODE_TRIANGLES = 0;
  /** Triangle strip draw mode. */
  public static final int DRAW_MODE_TRIANGLES_STRIP = 1;
  /** Triangle fan draw mode. */
  public static final int DRAW_MODE_TRIANGLES_FAN = 2;

  /** Number of texture coordinates per vertex (u, v). */
  public static final int TEXTURE_COORDS_PER_VERTEX = 2;
  /** Number of position coordinates per vertex (x, y, z). */
  public static final int POSITION_COORDS_PER_VERTEX = 3;

  /**
   * Generates a complete sphere equirectangular projection.
   *
   * @param stereoMode A {@link C.StereoMode} value.
   * @return The projection covering the full sphere.
   */
  public static Projection createEquirectangular(@C.StereoMode int stereoMode) {
    return createEquirectangular(
        /* radius= */ 50, // Should be large enough that there are no stereo artifacts.
        /* latitudes= */ 36, // Should be large enough to prevent videos looking wavy.
        /* longitudes= */ 72, // Should be large enough to prevent videos looking wavy.
        /* verticalFovDegrees= */ 180,
        /* horizontalFovDegrees= */ 360,
        stereoMode);
  }

  /**
   * Generates an equirectangular projection.
   *
   * @param radius Size of the sphere. Must be > 0.
   * @param latitudes Number of rows that make up the sphere. Must be >= 1.
   * @param longitudes Number of columns that make up the sphere. Must be >= 1.
   * @param verticalFovDegrees Total latitudinal degrees that are covered by the sphere. Must be in
   *     (0, 180].
   * @param horizontalFovDegrees Total longitudinal degrees that are covered by the sphere. Must be
   *     in (0, 360].
   * @param stereoMode A {@link C.StereoMode} value.
   * @return an equirectangular projection.
   */
  public static Projection createEquirectangular(
      float radius,
      int latitudes,
      int longitudes,
      float verticalFovDegrees,
      float horizontalFovDegrees,
      @C.StereoMode int stereoMode) {
    Assertions.checkArgument(radius > 0);
    Assertions.checkArgument(latitudes >= 1);
    Assertions.checkArgument(longitudes >= 1);
    Assertions.checkArgument(verticalFovDegrees > 0 && verticalFovDegrees <= 180);
    Assertions.checkArgument(horizontalFovDegrees > 0 && horizontalFovDegrees <= 360);

    // Compute angular size in radians of each UV quad.
    float verticalFovRads = (float) Math.toRadians(verticalFovDegrees);
    float horizontalFovRads = (float) Math.toRadians(horizontalFovDegrees);
    float quadHeightRads = verticalFovRads / latitudes;
    float quadWidthRads = horizontalFovRads / longitudes;

    // Each latitude strip has 2 * (longitudes quads + extra edge) vertices + 2 degenerate vertices.
    int vertexCount = (2 * (longitudes + 1) + 2) * latitudes;
    // Buffers to return.
    float[] vertexData = new float[vertexCount * POSITION_COORDS_PER_VERTEX];
    float[] textureData = new float[vertexCount * TEXTURE_COORDS_PER_VERTEX];

    // Generate the data for the sphere which is a set of triangle strips representing each
    // latitude band.
    int vOffset = 0; // Offset into the vertexData array.
    int tOffset = 0; // Offset into the textureData array.
    // (i, j) represents a quad in the equirectangular sphere.
    for (int j = 0; j < latitudes; ++j) { // For each horizontal triangle strip.
      // Each latitude band lies between the two phi values. Each vertical edge on a band lies on
      // a theta value.
      float phiLow = quadHeightRads * j - verticalFovRads / 2;
      float phiHigh = quadHeightRads * (j + 1) - verticalFovRads / 2;

      for (int i = 0; i < longitudes + 1; ++i) { // For each vertical edge in the band.
        for (int k = 0; k < 2; ++k) { // For low and high points on an edge.
          // For each point, determine its position in polar coordinates.
          float phi = k == 0 ? phiLow : phiHigh;
          float theta = quadWidthRads * i + (float) Math.PI - horizontalFovRads / 2;

          // Set vertex position data as Cartesian coordinates.
          vertexData[vOffset++] = -(float) (radius * Math.sin(theta) * Math.cos(phi));
          vertexData[vOffset++] = (float) (radius * Math.sin(phi));
          vertexData[vOffset++] = (float) (radius * Math.cos(theta) * Math.cos(phi));

          // Texture coordinates are the fraction of the FOV covered so far.
          textureData[tOffset++] = i * quadWidthRads / horizontalFovRads;
          textureData[tOffset++] = (j + k) * quadHeightRads / verticalFovRads;

          // Break up the triangle strip with degenerate vertices by copying first and last points.
          if ((i == 0 && k == 0) || (i == longitudes && k == 1)) {
            System.arraycopy(
                vertexData,
                vOffset - POSITION_COORDS_PER_VERTEX,
                vertexData,
                vOffset,
                POSITION_COORDS_PER_VERTEX);
            vOffset += POSITION_COORDS_PER_VERTEX;
            System.arraycopy(
                textureData,
                tOffset - TEXTURE_COORDS_PER_VERTEX,
                textureData,
                tOffset,
                TEXTURE_COORDS_PER_VERTEX);
            tOffset += TEXTURE_COORDS_PER_VERTEX;
          }
        }
        // Move on to the next vertical edge in the triangle strip.
      }
      // Move on to the next triangle strip.
    }
    SubMesh subMesh =
        new SubMesh(SubMesh.VIDEO_TEXTURE_ID, vertexData, textureData, DRAW_MODE_TRIANGLES_STRIP);
    return new Projection(new Mesh(subMesh), stereoMode);
  }

  /** The Mesh corresponding to the left eye. */
  public final Mesh leftMesh;
  /**
   * The Mesh corresponding to the right eye. If {@code singleMesh} is true then this mesh is
   * identical to {@link #leftMesh}.
   */
  public final Mesh rightMesh;
  /** The stereo mode. */
  public final @StereoMode int stereoMode;
  /** Whether the left and right mesh are identical. */
  public final boolean singleMesh;

  /**
   * Creates a Projection with single mesh.
   *
   * @param mesh The Mesh used for both eyes.
   * @param stereoMode A {@link StereoMode} value.
   */
  public Projection(Mesh mesh, int stereoMode) {
    this(mesh, mesh, stereoMode);
  }

  /**
   * Creates a Projection with dual mesh. Use {@link #Projection(Mesh, int)} if there is single mesh
   * for both eyes.
   *
   * @param leftMesh The Mesh corresponding to the left eye.
   * @param rightMesh The Mesh corresponding to the right eye.
   * @param stereoMode A {@link C.StereoMode} value.
   */
  public Projection(Mesh leftMesh, Mesh rightMesh, int stereoMode) {
    this.leftMesh = leftMesh;
    this.rightMesh = rightMesh;
    this.stereoMode = stereoMode;
    // Reference equality is intentional: a single-mesh projection passes the same instance twice.
    this.singleMesh = leftMesh == rightMesh;
  }

  /** The sub mesh associated with the {@link Mesh}. */
  public static final class SubMesh {
    /** Texture ID for video frames. */
    public static final int VIDEO_TEXTURE_ID = 0;

    /** Texture ID. */
    public final int textureId;
    /** The drawing mode. One of {@link DrawMode}. */
    public final @DrawMode int mode;
    /** The SubMesh vertices. */
    public final float[] vertices;
    /** The SubMesh texture coordinates. */
    public final float[] textureCoords;

    /**
     * @param textureId Texture ID. {@link #VIDEO_TEXTURE_ID} for video frames.
     * @param vertices The vertex position data.
     * @param textureCoords The texture coordinate data.
     * @param mode The {@link DrawMode} used to draw this sub mesh.
     */
    public SubMesh(int textureId, float[] vertices, float[] textureCoords, @DrawMode int mode) {
      this.textureId = textureId;
      // Cross-multiplied check that both arrays describe the same number of vertices, avoiding
      // integer division.
      Assertions.checkArgument(
          vertices.length * (long) TEXTURE_COORDS_PER_VERTEX
              == textureCoords.length * (long) POSITION_COORDS_PER_VERTEX);
      this.vertices = vertices;
      this.textureCoords = textureCoords;
      this.mode = mode;
    }

    /** Returns the SubMesh vertex count. */
    public int getVertexCount() {
      return vertices.length / POSITION_COORDS_PER_VERTEX;
    }
  }

  /** A Mesh associated with the projection scene. */
  public static final class Mesh {
    private final SubMeshes[] subMeshes;

    /** @param subMeshes The sub meshes that make up this mesh. */
    public Mesh(SubMesh... subMeshes) {
      this.subMeshes = subMeshes;
    }

    /** Returns the number of sub meshes. */
    public int getSubMeshCount() {
      return subMeshes.length;
    }

    /** Returns the SubMesh for the given index. */
    public SubMesh getSubMesh(int index) {
      return subMeshes[index];
    }
  }
}

View File

@ -0,0 +1,233 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.video.spherical;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.ParsableBitArray;
import com.google.android.exoplayer2.util.ParsableByteArray;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.spherical.Projection.Mesh;
import com.google.android.exoplayer2.video.spherical.Projection.SubMesh;
import java.util.ArrayList;
import java.util.zip.Inflater;
/**
 * A decoder for the projection mesh.
 *
 * <p>The mesh boxes parsed are described at <a
 * href="https://github.com/google/spatial-media/blob/master/docs/spherical-video-v2-rfc.md">
 * Spherical Video V2 RFC</a>.
 *
 * <p>The decoder does not perform CRC checks at the moment.
 */
public final class ProjectionDecoder {

  // Four-character codes for the box and encoding types defined by the Spherical Video V2 RFC.
  private static final int TYPE_YTMP = Util.getIntegerCodeForString("ytmp");
  private static final int TYPE_MSHP = Util.getIntegerCodeForString("mshp");
  private static final int TYPE_RAW = Util.getIntegerCodeForString("raw ");
  private static final int TYPE_DFL8 = Util.getIntegerCodeForString("dfl8");
  private static final int TYPE_MESH = Util.getIntegerCodeForString("mesh");
  private static final int TYPE_PROJ = Util.getIntegerCodeForString("proj");

  // Sanity limits to prevent a bad file from creating an OOM situation. We don't expect a mesh to
  // exceed these limits.
  private static final int MAX_COORDINATE_COUNT = 10000;
  private static final int MAX_VERTEX_COUNT = 32 * 1000;
  private static final int MAX_TRIANGLE_INDICES = 128 * 1000;

  // Static utility class; not instantiable.
  private ProjectionDecoder() {}

  /**
   * Decodes the projection data.
   *
   * @param projectionData The projection data.
   * @param stereoMode A {@link C.StereoMode} value.
   * @return The projection or null if the data can't be decoded.
   */
  public static @Nullable Projection decode(byte[] projectionData, @C.StereoMode int stereoMode) {
    ParsableByteArray input = new ParsableByteArray(projectionData);
    // MP4 containers include the proj box but webm containers do not.
    // Both containers use mshp.
    ArrayList<Mesh> meshes = null;
    try {
      meshes = isProj(input) ? parseProj(input) : parseMshp(input);
    } catch (ArrayIndexOutOfBoundsException ignored) {
      // Malformed data that over-runs the buffer; treated as undecodable below.
    }
    if (meshes == null) {
      return null;
    } else {
      switch (meshes.size()) {
        case 1:
          // Single mesh shared by both eyes.
          return new Projection(meshes.get(0), stereoMode);
        case 2:
          // Separate meshes for the left and right eyes.
          return new Projection(meshes.get(0), meshes.get(1), stereoMode);
        case 0:
        default:
          // Zero or more than two meshes can't be mapped to eyes.
          return null;
      }
    }
  }

  /** Returns true if the input contains a proj box. Indicates MP4 container. */
  private static boolean isProj(ParsableByteArray input) {
    input.skipBytes(4); // size
    int type = input.readInt();
    input.setPosition(0); // Rewind so subsequent parsing sees the full box.
    return type == TYPE_PROJ;
  }

  /**
   * Walks the children of a proj box looking for an mshp (or legacy ytmp) box and parses it.
   * Returns null if no such child is found or a child has an invalid size.
   */
  private static @Nullable ArrayList<Mesh> parseProj(ParsableByteArray input) {
    input.skipBytes(8); // size and type.
    int position = input.getPosition();
    int limit = input.limit();
    while (position < limit) {
      int childEnd = position + input.readInt();
      if (childEnd <= position || childEnd > limit) {
        // Corrupt child size (non-positive or past the parent box).
        return null;
      }
      int childAtomType = input.readInt();
      // Some early files named the atom ytmp rather than mshp.
      if (childAtomType == TYPE_YTMP || childAtomType == TYPE_MSHP) {
        input.setLimit(childEnd);
        return parseMshp(input);
      }
      position = childEnd;
      input.setPosition(position);
    }
    return null;
  }

  /**
   * Parses an mshp box. Handles the optional deflate compression of the payload. Returns null on
   * unsupported version or encoding, or if inflation fails.
   */
  private static @Nullable ArrayList<Mesh> parseMshp(ParsableByteArray input) {
    int version = input.readUnsignedByte();
    if (version != 0) {
      // Only version 0 is defined by the RFC.
      return null;
    }
    input.skipBytes(7); // flags + crc.
    int encoding = input.readInt();
    if (encoding == TYPE_DFL8) {
      // Payload is raw-deflate compressed; inflate it before parsing.
      ParsableByteArray output = new ParsableByteArray();
      if (!Util.inflate(input, output, new Inflater(true))) {
        return null;
      }
      input = output;
    } else if (encoding != TYPE_RAW) {
      // Unknown encoding.
      return null;
    }
    return parseRawMshpData(input);
  }

  /** Parses MSHP data after the encoding_four_cc field. */
  private static @Nullable ArrayList<Mesh> parseRawMshpData(ParsableByteArray input) {
    ArrayList<Mesh> meshes = new ArrayList<>();
    int position = input.getPosition();
    int limit = input.limit();
    while (position < limit) {
      int childEnd = position + input.readInt();
      if (childEnd <= position || childEnd > limit) {
        // Corrupt child size.
        return null;
      }
      int childAtomType = input.readInt();
      if (childAtomType == TYPE_MESH) {
        Mesh mesh = parseMesh(input);
        if (mesh == null) {
          return null;
        }
        meshes.add(mesh);
      }
      position = childEnd;
      input.setPosition(position);
    }
    return meshes;
  }

  /**
   * Parses a single mesh box. Vertex data is bit-packed: each vertex is 5 floats (x, y, z position
   * followed by u, v texture coordinates), encoded as zigzag deltas into a shared coordinate table.
   * Returns null if any count exceeds the sanity limits or an index is out of range.
   */
  private static @Nullable Mesh parseMesh(ParsableByteArray input) {
    // Read the coordinates.
    int coordinateCount = input.readInt();
    if (coordinateCount > MAX_COORDINATE_COUNT) {
      return null;
    }
    float[] coordinates = new float[coordinateCount];
    for (int coordinate = 0; coordinate < coordinateCount; coordinate++) {
      coordinates[coordinate] = input.readFloat();
    }
    // Read the vertices.
    int vertexCount = input.readInt();
    if (vertexCount > MAX_VERTEX_COUNT) {
      return null;
    }

    final double log2 = Math.log(2.0);
    // Bits needed to hold a zigzag-encoded delta that can address any coordinate in either
    // direction, i.e. ceil(log2(2 * coordinateCount)).
    int coordinateCountSizeBits = (int) Math.ceil(Math.log(2.0 * coordinateCount) / log2);
    // Switch to bit-level reading for the packed index data.
    ParsableBitArray bitInput = new ParsableBitArray(input.data);
    bitInput.setPosition(input.getPosition() * 8);

    float[] vertices = new float[vertexCount * 5];
    // Running coordinate index per component (x, y, z, u, v); deltas accumulate into these.
    int[] coordinateIndices = new int[5];
    int vertexIndex = 0;
    for (int vertex = 0; vertex < vertexCount; vertex++) {
      for (int i = 0; i < 5; i++) {
        int coordinateIndex =
            coordinateIndices[i] + decodeZigZag(bitInput.readBits(coordinateCountSizeBits));
        if (coordinateIndex >= coordinateCount || coordinateIndex < 0) {
          return null;
        }
        vertices[vertexIndex++] = coordinates[coordinateIndex];
        coordinateIndices[i] = coordinateIndex;
      }
    }

    // Pad to next byte boundary
    bitInput.setPosition(((bitInput.getPosition() + 7) & ~7));

    int subMeshCount = bitInput.readBits(32);
    SubMesh[] subMeshes = new SubMesh[subMeshCount];
    for (int i = 0; i < subMeshCount; i++) {
      int textureId = bitInput.readBits(8);
      int drawMode = bitInput.readBits(8);
      int triangleIndexCount = bitInput.readBits(32);
      if (triangleIndexCount > MAX_TRIANGLE_INDICES) {
        return null;
      }

      // Bits needed for a zigzag-encoded vertex index delta.
      int vertexCountSizeBits = (int) Math.ceil(Math.log(2.0 * vertexCount) / log2);
      int index = 0;
      float[] triangleVertices = new float[triangleIndexCount * 3];
      float[] textureCoords = new float[triangleIndexCount * 2];
      for (int counter = 0; counter < triangleIndexCount; counter++) {
        index += decodeZigZag(bitInput.readBits(vertexCountSizeBits));
        if (index < 0 || index >= vertexCount) {
          return null;
        }
        // Split each 5-float vertex into position (x, y, z) and texture (u, v) arrays.
        triangleVertices[counter * 3] = vertices[index * 5];
        triangleVertices[counter * 3 + 1] = vertices[index * 5 + 1];
        triangleVertices[counter * 3 + 2] = vertices[index * 5 + 2];
        textureCoords[counter * 2] = vertices[index * 5 + 3];
        textureCoords[counter * 2 + 1] = vertices[index * 5 + 4];
      }
      subMeshes[i] = new SubMesh(textureId, triangleVertices, textureCoords, drawMode);
    }
    return new Mesh(subMeshes);
  }

  /**
   * Decodes Zigzag encoding as described in
   * https://developers.google.com/protocol-buffers/docs/encoding#signed-integers
   */
  private static int decodeZigZag(int n) {
    return (n >> 1) ^ -(n & 1);
  }
}

View File

@ -0,0 +1,95 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.video.spherical;
import static com.google.common.truth.Truth.assertThat;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Util;
import java.util.Arrays;
import junit.framework.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
/** Tests for {@link ProjectionDecoder}. */
@RunWith(RobolectricTestRunner.class)
public final class ProjectionDecoderTest {

  // Hex dump of a proj box containing a single (ytmp-named) mshp box with a raw-encoded mesh.
  private static final byte[] PROJ_DATA =
      Util.getBytesFromHexString(
          "0000008D70726F6A0000008579746D7000000000ABA158D672617720000000716D65736800000006BF800000"
              + "3F8000003F0000003F2AAAAB000000003EAAAAAB000000100024200104022430010421034020400123"
              + "1020401013020010102222001001003100200010320010000000010000000000240084009066080420"
              + "9020108421002410860214C1200660");
  // Byte offset of the embedded mshp box within PROJ_DATA (proj box header + size/type of ytmp).
  private static final int MSHP_OFFSET = 16;
  // Expected properties of the decoded mesh.
  private static final int VERTEX_COUNT = 36;
  private static final float[] FIRST_VERTEX = {-1.0f, -1.0f, 1.0f};
  private static final float[] LAST_VERTEX = {1.0f, -1.0f, -1.0f};
  private static final float[] FIRST_UV = {0.5f, 1.0f};
  private static final float[] LAST_UV = {1.0f, 1.0f};

  @Test
  public void testDecodeProj() {
    testDecoding(PROJ_DATA);
  }

  @Test
  public void testDecodeMshp() {
    // Drop the proj box wrapping to exercise the bare-mshp (webm) code path.
    testDecoding(Arrays.copyOfRange(PROJ_DATA, MSHP_OFFSET, PROJ_DATA.length));
  }

  /** Decodes {@code data} and asserts the resulting mono projection is as expected. */
  private static void testDecoding(byte[] data) {
    Projection projection = ProjectionDecoder.decode(data, C.STEREO_MODE_MONO);
    assertThat(projection).isNotNull();
    assertThat(projection.stereoMode).isEqualTo(C.STEREO_MODE_MONO);
    assertThat(projection.leftMesh).isNotNull();
    assertThat(projection.rightMesh).isNotNull();
    assertThat(projection.singleMesh).isTrue();
    testSubMesh(projection.leftMesh);
  }

  /** Tests that the SubMesh (mesh with the video) contains the expected data. */
  private static void testSubMesh(Projection.Mesh leftMesh) {
    assertThat(leftMesh.getSubMeshCount()).isEqualTo(1);
    Projection.SubMesh subMesh = leftMesh.getSubMesh(0);
    assertThat(subMesh.mode).isEqualTo(Projection.DRAW_MODE_TRIANGLES);
    float[] vertices = subMesh.vertices;
    float[] uv = subMesh.textureCoords;
    assertThat(vertices.length).isEqualTo(VERTEX_COUNT * 3);
    assertThat(subMesh.textureCoords.length).isEqualTo(VERTEX_COUNT * 2);
    // Test first vertex
    testCoordinate(FIRST_VERTEX, vertices, 0, 3);
    // Test last vertex
    testCoordinate(LAST_VERTEX, vertices, VERTEX_COUNT * 3 - 3, 3);
    // Test first uv
    testCoordinate(FIRST_UV, uv, 0, 2);
    // Test last uv
    testCoordinate(LAST_UV, uv, VERTEX_COUNT * 2 - 2, 2);
  }

  /**
   * Asserts that {@code count} values of {@code output}, starting at {@code offset}, exactly match
   * {@code expected}. Uses Truth rather than the deprecated {@code junit.framework.Assert}; both
   * perform an exact (non-delta) float comparison.
   */
  private static void testCoordinate(float[] expected, float[] output, int offset, int count) {
    for (int i = 0; i < count; i++) {
      assertThat(output[i + offset]).isEqualTo(expected[i]);
    }
  }
}

View File

@ -0,0 +1,93 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.video.spherical;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import com.google.android.exoplayer2.C;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
/** Tests for {@link Projection}. */
@RunWith(RobolectricTestRunner.class)
public class ProjectionTest {

  private static final float EPSILON = .00001f;

  // Parameters describing the default 360 sphere used by the tests.
  private static final float RADIUS = 1;
  private static final int LATITUDES = 12;
  private static final int LONGITUDES = 24;
  private static final float VERTICAL_FOV_DEGREES = 180;
  private static final float HORIZONTAL_FOV_DEGREES = 360;

  @Test
  public void testSphericalMesh() throws Exception {
    // Only the first param is important in this test.
    Projection projection =
        Projection.createEquirectangular(
            RADIUS,
            LATITUDES,
            LONGITUDES,
            VERTICAL_FOV_DEGREES,
            HORIZONTAL_FOV_DEGREES,
            C.STEREO_MODE_MONO);

    Projection.SubMesh subMesh = projection.leftMesh.getSubMesh(0);
    assertThat(subMesh.getVertexCount()).isGreaterThan(LATITUDES * LONGITUDES);

    // Every generated vertex must lie on the sphere, i.e. at distance RADIUS from the origin.
    float[] vertexData = subMesh.vertices;
    for (int offset = 0; offset < vertexData.length; offset += 3) {
      float x = vertexData[offset];
      float y = vertexData[offset + 1];
      float z = vertexData[offset + 2];
      assertEquals(RADIUS, Math.sqrt(x * x + y * y + z * z), EPSILON);
    }
  }

  @Test
  public void testArgumentValidation() {
    assertThrowsIllegalArgument(0, 1, 1, 1, 1); // Non-positive radius.
    assertThrowsIllegalArgument(1, 0, 1, 1, 1); // Too few latitudes.
    assertThrowsIllegalArgument(1, 1, 0, 1, 1); // Too few longitudes.
    assertThrowsIllegalArgument(1, 1, 1, 0, 1); // Vertical FOV outside (0, 180].
    assertThrowsIllegalArgument(1, 1, 1, 181, 1);
    assertThrowsIllegalArgument(1, 1, 1, 1, 0); // Horizontal FOV outside (0, 360].
    assertThrowsIllegalArgument(1, 1, 1, 1, 361);
  }

  /** Asserts that {@code createEquirectangular} rejects the given parameters. */
  private void assertThrowsIllegalArgument(
      float radius,
      int latitudes,
      int longitudes,
      float verticalFovDegrees,
      float horizontalFovDegrees) {
    try {
      Projection.createEquirectangular(
          radius,
          latitudes,
          longitudes,
          verticalFovDegrees,
          horizontalFovDegrees,
          C.STEREO_MODE_MONO);
      fail();
    } catch (IllegalArgumentException expected) {
      // Expected.
    }
  }
}

View File

@ -509,6 +509,7 @@ public class PlayerView extends FrameLayout {
oldVideoComponent.clearVideoTextureView((TextureView) surfaceView);
} else if (surfaceView instanceof SphericalSurfaceView) {
oldVideoComponent.clearVideoSurface(((SphericalSurfaceView) surfaceView).getSurface());
oldVideoComponent.clearVideoFrameMetadataListener(((SphericalSurfaceView) surfaceView));
} else if (surfaceView instanceof SurfaceView) {
oldVideoComponent.clearVideoSurfaceView((SurfaceView) surfaceView);
}
@ -535,6 +536,7 @@ public class PlayerView extends FrameLayout {
newVideoComponent.setVideoTextureView((TextureView) surfaceView);
} else if (surfaceView instanceof SphericalSurfaceView) {
newVideoComponent.setVideoSurface(((SphericalSurfaceView) surfaceView).getSurface());
newVideoComponent.setVideoFrameMetadataListener(((SphericalSurfaceView) surfaceView));
} else if (surfaceView instanceof SurfaceView) {
newVideoComponent.setVideoSurfaceView((SurfaceView) surfaceView);
}

View File

@ -1,290 +0,0 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import static com.google.android.exoplayer2.ui.spherical.GlUtil.checkGlError;
import android.annotation.TargetApi;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import com.google.android.exoplayer2.C;
import java.nio.FloatBuffer;
/**
* Utility class to generate & render spherical meshes for video or images. Use the static creation
* methods to construct the Mesh's data. Then call the Mesh constructor on the GL thread when ready.
* Use glDraw method to render it.
*/
@TargetApi(15)
/*package*/ final class Mesh {
/** Defines the constants identifying the current eye type. */
/*package*/ interface EyeType {
/** Single eye in monocular rendering. */
int MONOCULAR = 0;
/** The left eye in stereo rendering. */
int LEFT = 1;
/** The right eye in stereo rendering. */
int RIGHT = 2;
}
// Basic vertex & fragment shaders to render a mesh with 3D position & 2D texture data.
private static final String[] VERTEX_SHADER_CODE =
new String[] {
"uniform mat4 uMvpMatrix;",
"attribute vec4 aPosition;",
"attribute vec2 aTexCoords;",
"varying vec2 vTexCoords;",
// Standard transformation.
"void main() {",
" gl_Position = uMvpMatrix * aPosition;",
" vTexCoords = aTexCoords;",
"}"
};
private static final String[] FRAGMENT_SHADER_CODE =
new String[] {
// This is required since the texture data is GL_TEXTURE_EXTERNAL_OES.
"#extension GL_OES_EGL_image_external : require",
"precision mediump float;",
// Standard texture rendering shader.
"uniform samplerExternalOES uTexture;",
"varying vec2 vTexCoords;",
"void main() {",
" gl_FragColor = texture2D(uTexture, vTexCoords);",
"}"
};
// Constants related to vertex data.
private static final int POSITION_COORDS_PER_VERTEX = 3; // X, Y, Z.
// The vertex contains texture coordinates for both the left & right eyes. If the scene is
// rendered in VR, the appropriate part of the vertex will be selected at runtime. For a mono
// scene, only the left eye's UV coordinates are used.
// For mono media, the UV coordinates are duplicated in each. For stereo media, the UV coords
// point to the appropriate part of the source media.
private static final int TEXTURE_COORDS_PER_VERTEX = 2 * 2;
private static final int COORDS_PER_VERTEX =
POSITION_COORDS_PER_VERTEX + TEXTURE_COORDS_PER_VERTEX;
// Data is tightly packed. Each vertex is [x, y, z, u_left, v_left, u_right, v_right].
private static final int VERTEX_STRIDE_BYTES = COORDS_PER_VERTEX * C.BYTES_PER_FLOAT;
// Vertices for the mesh with 3D position + left 2D texture UV + right 2D texture UV.
private final int vertixCount;
private final FloatBuffer vertexBuffer;
// Program related GL items. These are only valid if program != 0.
private int program;
private int mvpMatrixHandle;
private int positionHandle;
private int texCoordsHandle;
private int textureHandle;
/**
* Generates a 3D UV sphere for rendering monoscopic or stereoscopic video.
*
* <p>This can be called on any thread. The returned {@link Mesh} isn't valid until {@link
* #init()} is called.
*
* @param radius Size of the sphere. Must be > 0.
* @param latitudes Number of rows that make up the sphere. Must be >= 1.
* @param longitudes Number of columns that make up the sphere. Must be >= 1.
* @param verticalFovDegrees Total latitudinal degrees that are covered by the sphere. Must be in
* (0, 180].
* @param horizontalFovDegrees Total longitudinal degrees that are covered by the sphere.Must be
* in (0, 360].
* @param stereoMode A {@link C.StereoMode} value.
* @return Unintialized Mesh.
*/
public static Mesh createUvSphere(
float radius,
int latitudes,
int longitudes,
float verticalFovDegrees,
float horizontalFovDegrees,
@C.StereoMode int stereoMode) {
return new Mesh(
createUvSphereVertexData(
radius, latitudes, longitudes, verticalFovDegrees, horizontalFovDegrees, stereoMode));
}
/** Used by static constructors. */
private Mesh(float[] vertexData) {
vertixCount = vertexData.length / COORDS_PER_VERTEX;
vertexBuffer = GlUtil.createBuffer(vertexData);
}
/** Initializes of the GL components. */
/* package */ void init() {
program = GlUtil.compileProgram(VERTEX_SHADER_CODE, FRAGMENT_SHADER_CODE);
mvpMatrixHandle = GLES20.glGetUniformLocation(program, "uMvpMatrix");
positionHandle = GLES20.glGetAttribLocation(program, "aPosition");
texCoordsHandle = GLES20.glGetAttribLocation(program, "aTexCoords");
textureHandle = GLES20.glGetUniformLocation(program, "uTexture");
}
/**
* Renders the mesh. This must be called on the GL thread.
*
* @param textureId GL_TEXTURE_EXTERNAL_OES used for this mesh.
* @param mvpMatrix The Model View Projection matrix.
* @param eyeType An {@link EyeType} value.
*/
/* package */ void draw(int textureId, float[] mvpMatrix, int eyeType) {
// Configure shader.
GLES20.glUseProgram(program);
checkGlError();
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glEnableVertexAttribArray(texCoordsHandle);
checkGlError();
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
GLES20.glUniform1i(textureHandle, 0);
checkGlError();
// Load position data.
vertexBuffer.position(0);
GLES20.glVertexAttribPointer(
positionHandle,
POSITION_COORDS_PER_VERTEX,
GLES20.GL_FLOAT,
false,
VERTEX_STRIDE_BYTES,
vertexBuffer);
checkGlError();
// Load texture data. Eye.Type.RIGHT uses the left eye's data.
int textureOffset =
(eyeType == EyeType.RIGHT) ? POSITION_COORDS_PER_VERTEX + 2 : POSITION_COORDS_PER_VERTEX;
vertexBuffer.position(textureOffset);
GLES20.glVertexAttribPointer(
texCoordsHandle,
TEXTURE_COORDS_PER_VERTEX,
GLES20.GL_FLOAT,
false,
VERTEX_STRIDE_BYTES,
vertexBuffer);
checkGlError();
// Render.
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertixCount);
checkGlError();
GLES20.glDisableVertexAttribArray(positionHandle);
GLES20.glDisableVertexAttribArray(texCoordsHandle);
}
/** Cleans up the GL resources. */
/* package */ void shutdown() {
if (program != 0) {
GLES20.glDeleteProgram(program);
}
}
// @VisibleForTesting
/**
 * Generates interleaved vertex data for a (possibly partial) UV sphere rendered as one
 * GL_TRIANGLE_STRIP, with degenerate vertices separating the latitude bands.
 *
 * <p>Each vertex holds {@code COORDS_PER_VERTEX} floats: x, y, z position followed by two UV
 * pairs — the left/mono eye's texture coordinates and then the right eye's. For mono content both
 * pairs span the whole texture; for stereo content each eye addresses its half of the texture.
 *
 * @param radius Radius of the sphere in world units. Must be &gt; 0.
 * @param latitudes Number of horizontal bands. Must be &gt;= 1.
 * @param longitudes Number of vertical slices per band. Must be &gt;= 1.
 * @param verticalFovDegrees Latitudinal degrees covered by the sphere, in (0, 180].
 * @param horizontalFovDegrees Longitudinal degrees covered by the sphere, in (0, 360].
 * @param stereoMode A {@link C.StereoMode} value selecting the texture coordinate layout.
 * @return The vertex data array.
 * @throws IllegalArgumentException If any parameter is outside its valid range.
 */
/*package*/ static float[] createUvSphereVertexData(
    float radius,
    int latitudes,
    int longitudes,
    float verticalFovDegrees,
    float horizontalFovDegrees,
    @C.StereoMode int stereoMode) {
  if (radius <= 0
      || latitudes < 1
      || longitudes < 1
      || verticalFovDegrees <= 0
      || verticalFovDegrees > 180
      || horizontalFovDegrees <= 0
      || horizontalFovDegrees > 360) {
    // Include the offending values to make failures diagnosable.
    throw new IllegalArgumentException(
        "Invalid parameters for sphere: radius="
            + radius
            + " latitudes="
            + latitudes
            + " longitudes="
            + longitudes
            + " verticalFovDegrees="
            + verticalFovDegrees
            + " horizontalFovDegrees="
            + horizontalFovDegrees);
  }

  // Compute angular size in radians of each UV quad.
  float verticalFovRads = (float) Math.toRadians(verticalFovDegrees);
  float horizontalFovRads = (float) Math.toRadians(horizontalFovDegrees);
  float quadHeightRads = verticalFovRads / latitudes;
  float quadWidthRads = horizontalFovRads / longitudes;

  // Each latitude strip has 2 * (longitudes quads + extra edge) vertices + 2 degenerate vertices.
  int vertexCount = (2 * (longitudes + 1) + 2) * latitudes;
  // Buffer to return.
  float[] vertexData = new float[vertexCount * COORDS_PER_VERTEX];

  // Generate the data for the sphere which is a set of triangle strips representing each
  // latitude band.
  int offset = 0; // Offset into the vertexData array.
  // (i, j) represents a quad in the equirectangular sphere.
  for (int j = 0; j < latitudes; ++j) { // For each horizontal triangle strip.
    // Each latitude band lies between the two phi values. Each vertical edge on a band lies on
    // a theta value.
    float phiLow = quadHeightRads * j - verticalFovRads / 2;
    float phiHigh = quadHeightRads * (j + 1) - verticalFovRads / 2;
    for (int i = 0; i < longitudes + 1; ++i) { // For each vertical edge in the band.
      // theta and the horizontal texture fraction are constant for both points on this edge, so
      // hoist them out of the inner loop. The expressions match the per-point originals exactly,
      // keeping the float results bit-identical.
      float theta = quadWidthRads * i + (float) Math.PI - horizontalFovRads / 2;
      float u = i * quadWidthRads / horizontalFovRads;
      for (int k = 0; k < 2; ++k) { // For low and high points on an edge.
        // For each point, determine its position in polar coordinates.
        float phi = (k == 0) ? phiLow : phiHigh;
        // Vertical texture fraction [0, 1] for this point.
        float v = (j + k) * quadHeightRads / verticalFovRads;

        // Set vertex position data as Cartesian coordinates.
        vertexData[offset] = -(float) (radius * Math.sin(theta) * Math.cos(phi));
        vertexData[offset + 1] = (float) (radius * Math.sin(phi));
        vertexData[offset + 2] = (float) (radius * Math.cos(theta) * Math.cos(phi));

        // Set vertex texture.x data.
        if (stereoMode == C.STEREO_MODE_LEFT_RIGHT) {
          // For left-right media, each eye's x coordinate points to the left or right half of the
          // texture.
          vertexData[offset + 3] = u / 2;
          vertexData[offset + 5] = u / 2 + .5f;
        } else {
          // For top-bottom or monoscopic media, the eye's x spans the full width of the texture.
          vertexData[offset + 3] = u;
          vertexData[offset + 5] = u;
        }

        // Set vertex texture.y data. The "1 - ..." is due to Canvas vs GL coords.
        if (stereoMode == C.STEREO_MODE_TOP_BOTTOM) {
          // For top-bottom media, each eye's y coordinate points to the top or bottom half of the
          // texture.
          vertexData[offset + 4] = 1 - (v / 2 + .5f);
          vertexData[offset + 6] = 1 - v / 2;
        } else {
          // For left-right or monoscopic media, the eye's y spans the full height of the texture.
          vertexData[offset + 4] = 1 - v;
          vertexData[offset + 6] = 1 - v;
        }

        offset += COORDS_PER_VERTEX;

        // Break up the triangle strip with degenerate vertices by copying first and last points.
        if ((i == 0 && k == 0) || (i == longitudes && k == 1)) {
          System.arraycopy(
              vertexData, offset - COORDS_PER_VERTEX, vertexData, offset, COORDS_PER_VERTEX);
          offset += COORDS_PER_VERTEX;
        }
      }
      // Move on to the next vertical edge in the triangle strip.
    }
    // Move on to the next triangle strip.
  }
  return vertexData;
}
}

View File

@ -0,0 +1,239 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import static com.google.android.exoplayer2.ui.spherical.GlUtil.checkGlError;
import android.annotation.TargetApi;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.video.spherical.Projection;
import java.nio.FloatBuffer;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
 * Utility class to render spherical meshes for video or images. Call {@link #init()} on the GL
 * thread when ready.
 *
 * <p>NOTE(review): {@link #setProjection(Projection)} writes fields that {@link #draw(int,
 * float[], int)} reads without synchronization; the caller appears to post it onto the GL thread
 * (e.g. via {@code GLSurfaceView#queueEvent}) — confirm before invoking from other threads.
 */
@TargetApi(15)
/*package*/ final class ProjectionRenderer {

  /** Defines the constants identifying the current eye type. */
  /*package*/ interface EyeType {
    /** Single eye in monocular rendering. */
    int MONOCULAR = 0;

    /** The left eye in stereo rendering. */
    int LEFT = 1;

    /** The right eye in stereo rendering. */
    int RIGHT = 2;
  }

  /**
   * Returns whether {@code projection} is supported. At least it should have left mesh and there
   * should be only one sub mesh per mesh.
   */
  public static boolean isSupported(Projection projection) {
    Projection.Mesh leftMesh = projection.leftMesh;
    Projection.Mesh rightMesh = projection.rightMesh;
    // Only single-sub-mesh projections that sample the video texture are renderable here.
    return leftMesh.getSubMeshCount() == 1
        && leftMesh.getSubMesh(0).textureId == Projection.SubMesh.VIDEO_TEXTURE_ID
        && rightMesh.getSubMeshCount() == 1
        && rightMesh.getSubMesh(0).textureId == Projection.SubMesh.VIDEO_TEXTURE_ID;
  }

  // Basic vertex & fragment shaders to render a mesh with 3D position & 2D texture data.
  private static final String[] VERTEX_SHADER_CODE =
      new String[] {
        "uniform mat4 uMvpMatrix;",
        "uniform mat3 uTexMatrix;",
        "attribute vec4 aPosition;",
        "attribute vec2 aTexCoords;",
        "varying vec2 vTexCoords;",
        // Standard transformation.
        "void main() {",
        " gl_Position = uMvpMatrix * aPosition;",
        " vTexCoords = (uTexMatrix * vec3(aTexCoords, 1)).xy;",
        "}"
      };

  private static final String[] FRAGMENT_SHADER_CODE =
      new String[] {
        // This is required since the texture data is GL_TEXTURE_EXTERNAL_OES.
        "#extension GL_OES_EGL_image_external : require",
        "precision mediump float;",
        // Standard texture rendering shader.
        "uniform samplerExternalOES uTexture;",
        "varying vec2 vTexCoords;",
        "void main() {",
        " gl_FragColor = texture2D(uTexture, vTexCoords);",
        "}"
      };

  // Texture transform matrices: 3x3 column-major matrices applied to (u, v, 1) in the vertex
  // shader. Each flips v (bitmap vs GL texture coordinates), and the TOP/BOTTOM/LEFT/RIGHT
  // variants additionally map the quad onto the half of the texture holding that eye's view.
  private static final float[] TEX_MATRIX_WHOLE = {
    1.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, 1.0f
  };
  private static final float[] TEX_MATRIX_TOP = {
    1.0f, 0.0f, 0.0f, 0.0f, -0.5f, 0.0f, 0.0f, 0.5f, 1.0f
  };
  private static final float[] TEX_MATRIX_BOTTOM = {
    1.0f, 0.0f, 0.0f, 0.0f, -0.5f, 0.0f, 0.0f, 1.0f, 1.0f
  };
  private static final float[] TEX_MATRIX_LEFT = {
    0.5f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, 1.0f
  };
  private static final float[] TEX_MATRIX_RIGHT = {
    0.5f, 0.0f, 0.0f, 0.0f, 0.5f, 1.0f, 1.0f
  };

  // Current projection state, replaced wholesale by setProjection().
  private int stereoMode;
  private @Nullable MeshData leftMeshData;
  private @Nullable MeshData rightMeshData;

  // Program related GL items. These are only valid if program != 0.
  private int program;
  private int mvpMatrixHandle;
  private int uTexMatrixHandle;
  private int positionHandle;
  private int texCoordsHandle;
  private int textureHandle;

  /**
   * Sets a {@link Projection} to be used.
   *
   * @param projection Contains the projection data to be rendered.
   * @see #isSupported(Projection)
   */
  public void setProjection(Projection projection) {
    if (!isSupported(projection)) {
      // Unsupported projections are silently ignored; any previous projection keeps rendering.
      return;
    }
    stereoMode = projection.stereoMode;
    leftMeshData = new MeshData(projection.leftMesh.getSubMesh(0));
    // A single-mesh projection shares the left mesh data for both eyes.
    rightMeshData =
        projection.singleMesh ? leftMeshData : new MeshData(projection.rightMesh.getSubMesh(0));
  }

  /** Initializes the GL components. Must be called on the GL thread. */
  /* package */ void init() {
    program = GlUtil.compileProgram(VERTEX_SHADER_CODE, FRAGMENT_SHADER_CODE);
    // Cache uniform/attribute locations so draw() doesn't query them every frame.
    mvpMatrixHandle = GLES20.glGetUniformLocation(program, "uMvpMatrix");
    uTexMatrixHandle = GLES20.glGetUniformLocation(program, "uTexMatrix");
    positionHandle = GLES20.glGetAttribLocation(program, "aPosition");
    texCoordsHandle = GLES20.glGetAttribLocation(program, "aTexCoords");
    textureHandle = GLES20.glGetUniformLocation(program, "uTexture");
  }

  /**
   * Renders the mesh. This must be called on the GL thread.
   *
   * @param textureId GL_TEXTURE_EXTERNAL_OES used for this mesh.
   * @param mvpMatrix The Model View Projection matrix.
   * @param eyeType An {@link EyeType} value.
   */
  /* package */ void draw(int textureId, float[] mvpMatrix, int eyeType) {
    // Configure shader.
    GLES20.glUseProgram(program);
    checkGlError();

    GLES20.glEnableVertexAttribArray(positionHandle);
    GLES20.glEnableVertexAttribArray(texCoordsHandle);
    checkGlError();

    // Pick the texture transform for the current eye and stereo layout.
    float[] texMatrix;
    if (stereoMode == C.STEREO_MODE_TOP_BOTTOM) {
      texMatrix = eyeType == EyeType.RIGHT ? TEX_MATRIX_BOTTOM : TEX_MATRIX_TOP;
    } else if (stereoMode == C.STEREO_MODE_LEFT_RIGHT) {
      texMatrix = eyeType == EyeType.RIGHT ? TEX_MATRIX_RIGHT : TEX_MATRIX_LEFT;
    } else {
      texMatrix = TEX_MATRIX_WHOLE;
    }
    GLES20.glUniformMatrix3fv(uTexMatrixHandle, 1, false, texMatrix, 0);
    GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0);
    // Bind the external (SurfaceTexture-backed) video texture to texture unit 0.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
    GLES20.glUniform1i(textureHandle, 0);
    checkGlError();

    // Throws NPE if no projection has been set; the right eye may share the left eye's mesh.
    MeshData meshData =
        Assertions.checkNotNull(eyeType == EyeType.RIGHT ? rightMeshData : leftMeshData);

    // Load position data.
    GLES20.glVertexAttribPointer(
        positionHandle,
        Projection.POSITION_COORDS_PER_VERTEX,
        GLES20.GL_FLOAT,
        false,
        Projection.POSITION_COORDS_PER_VERTEX * C.BYTES_PER_FLOAT,
        meshData.vertexBuffer);
    checkGlError();

    // Load texture data.
    GLES20.glVertexAttribPointer(
        texCoordsHandle,
        Projection.TEXTURE_COORDS_PER_VERTEX,
        GLES20.GL_FLOAT,
        false,
        Projection.TEXTURE_COORDS_PER_VERTEX * C.BYTES_PER_FLOAT,
        meshData.textureBuffer);
    checkGlError();

    // Render.
    GLES20.glDrawArrays(meshData.drawMode, 0, meshData.vertexCount);
    checkGlError();

    GLES20.glDisableVertexAttribArray(positionHandle);
    GLES20.glDisableVertexAttribArray(texCoordsHandle);
  }

  /** Cleans up the GL resources. */
  /* package */ void shutdown() {
    // NOTE(review): program is not reset to 0 here, so a second shutdown() would pass an
    // already-deleted name to glDeleteProgram.
    if (program != 0) {
      GLES20.glDeleteProgram(program);
    }
  }

  /** GL-ready buffers and draw mode for a single {@link Projection.SubMesh}. */
  private static class MeshData {
    private final int vertexCount;
    private final FloatBuffer vertexBuffer;
    private final FloatBuffer textureBuffer;
    @Projection.DrawMode private final int drawMode;

    public MeshData(Projection.SubMesh subMesh) {
      vertexCount = subMesh.getVertexCount();
      // Snapshot the sub mesh data into FloatBuffers usable by glVertexAttribPointer.
      vertexBuffer = GlUtil.createBuffer(subMesh.vertices);
      textureBuffer = GlUtil.createBuffer(subMesh.textureCoords);
      // Map the Projection draw mode onto the corresponding GLES primitive type.
      switch (subMesh.mode) {
        case Projection.DRAW_MODE_TRIANGLES_STRIP:
          drawMode = GLES20.GL_TRIANGLE_STRIP;
          break;
        case Projection.DRAW_MODE_TRIANGLES_FAN:
          drawMode = GLES20.GL_TRIANGLE_FAN;
          break;
        case Projection.DRAW_MODE_TRIANGLES:
        default:
          drawMode = GLES20.GL_TRIANGLES;
          break;
      }
    }
  }
}

View File

@ -20,8 +20,9 @@ import static com.google.android.exoplayer2.ui.spherical.GlUtil.checkGlError;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.support.annotation.Nullable;
import com.google.android.exoplayer2.ui.spherical.Mesh.EyeType;
import com.google.android.exoplayer2.ui.spherical.ProjectionRenderer.EyeType;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.video.spherical.Projection;
import java.util.concurrent.atomic.AtomicBoolean;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@ -33,14 +34,18 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/*package*/ final class SceneRenderer {
private final AtomicBoolean frameAvailable;
private final ProjectionRenderer projectionRenderer;
private int textureId;
@Nullable private SurfaceTexture surfaceTexture;
@MonotonicNonNull private Mesh mesh;
private boolean meshInitialized;
private @MonotonicNonNull SurfaceTexture surfaceTexture;
private @Nullable Projection pendingProjection;
private long pendingProjectionTimeNs;
private long lastFrameTimestamp;
public SceneRenderer() {
public SceneRenderer(Projection projection) {
frameAvailable = new AtomicBoolean();
projectionRenderer = new ProjectionRenderer();
projectionRenderer.setProjection(projection);
}
/** Initializes the renderer. */
@ -49,19 +54,19 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
checkGlError();
projectionRenderer.init();
checkGlError();
textureId = GlUtil.createExternalTexture();
surfaceTexture = new SurfaceTexture(textureId);
surfaceTexture.setOnFrameAvailableListener(surfaceTexture -> frameAvailable.set(true));
return surfaceTexture;
}
/** Sets a {@link Mesh} to be used to display video. */
public void setMesh(Mesh mesh) {
if (this.mesh != null) {
this.mesh.shutdown();
}
this.mesh = mesh;
meshInitialized = false;
/** Sets a {@link Projection} to be used to display video. */
public void setProjection(Projection projection, long timeNs) {
pendingProjection = projection;
pendingProjectionTimeNs = timeNs;
}
/**
@ -71,14 +76,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
* @param eyeType an {@link EyeType} value
*/
public void drawFrame(float[] viewProjectionMatrix, int eyeType) {
if (mesh == null) {
return;
}
if (!meshInitialized) {
meshInitialized = true;
mesh.init();
}
// glClear isn't strictly necessary when rendering fully spherical panoramas, but it can improve
// performance on tiled renderers by causing the GPU to discard previous data.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
@ -87,8 +84,13 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (frameAvailable.compareAndSet(true, false)) {
Assertions.checkNotNull(surfaceTexture).updateTexImage();
checkGlError();
lastFrameTimestamp = surfaceTexture.getTimestamp();
}
if (pendingProjection != null && pendingProjectionTimeNs <= lastFrameTimestamp) {
projectionRenderer.setProjection(pendingProjection);
pendingProjection = null;
}
mesh.draw(textureId, viewProjectionMatrix, eyeType);
projectionRenderer.draw(textureId, viewProjectionMatrix, eyeType);
}
}

View File

@ -37,9 +37,14 @@ import android.view.Display;
import android.view.Surface;
import android.view.WindowManager;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ui.spherical.Mesh.EyeType;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.ui.spherical.ProjectionRenderer.EyeType;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.VideoFrameMetadataListener;
import com.google.android.exoplayer2.video.spherical.Projection;
import com.google.android.exoplayer2.video.spherical.ProjectionDecoder;
import java.util.Arrays;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
@ -54,7 +59,8 @@ import javax.microedition.khronos.opengles.GL10;
* match what they expect.
*/
@TargetApi(15)
public final class SphericalSurfaceView extends GLSurfaceView {
public final class SphericalSurfaceView extends GLSurfaceView
implements VideoFrameMetadataListener {
/**
* This listener can be used to be notified when the {@link Surface} associated with this view is
@ -70,17 +76,6 @@ public final class SphericalSurfaceView extends GLSurfaceView {
void surfaceChanged(@Nullable Surface surface);
}
// A spherical mesh for video should be large enough that there are no stereo artifacts.
private static final int SPHERE_RADIUS_METERS = 50;
// TODO These should be configured based on the video type. It's assumed 360 video here.
private static final int DEFAULT_SPHERE_HORIZONTAL_DEGREES = 360;
private static final int DEFAULT_SPHERE_VERTICAL_DEGREES = 180;
// The 360 x 180 sphere has 5 degree quads. Increase these if lines in videos look wavy.
private static final int DEFAULT_SPHERE_COLUMNS = 72;
private static final int DEFAULT_SPHERE_ROWS = 36;
// Arbitrary vertical field of view.
private static final int FIELD_OF_VIEW_DEGREES = 90;
private static final float Z_NEAR = .1f;
@ -99,6 +94,9 @@ public final class SphericalSurfaceView extends GLSurfaceView {
private @Nullable SurfaceListener surfaceListener;
private @Nullable SurfaceTexture surfaceTexture;
private @Nullable Surface surface;
private @C.StreamType int defaultStereoMode;
private @C.StreamType int currentStereoMode;
private @Nullable byte[] currentProjectionData;
public SphericalSurfaceView(Context context) {
this(context, null);
@ -107,6 +105,8 @@ public final class SphericalSurfaceView extends GLSurfaceView {
public SphericalSurfaceView(Context context, @Nullable AttributeSet attributeSet) {
super(context, attributeSet);
defaultStereoMode = C.STEREO_MODE_MONO;
currentStereoMode = C.STEREO_MODE_MONO;
mainHandler = new Handler(Looper.getMainLooper());
// Configure sensors and touch.
@ -129,30 +129,16 @@ public final class SphericalSurfaceView extends GLSurfaceView {
setEGLContextClientVersion(2);
setRenderer(renderer);
setOnTouchListener(touchTracker);
setStereoMode(C.STEREO_MODE_MONO);
}
/**
* Sets stereo mode of the media to be played.
* Sets the default stereo mode. If the played video doesn't contain a stereo mode the default one
* is used.
*
* @param stereoMode One of {@link C#STEREO_MODE_MONO}, {@link C#STEREO_MODE_TOP_BOTTOM}, {@link
* C#STEREO_MODE_LEFT_RIGHT}.
* @param stereoMode A {@link C.StereoMode} value.
*/
public void setStereoMode(@C.StereoMode int stereoMode) {
Assertions.checkState(
stereoMode == C.STEREO_MODE_MONO
|| stereoMode == C.STEREO_MODE_TOP_BOTTOM
|| stereoMode == C.STEREO_MODE_LEFT_RIGHT);
Mesh mesh =
Mesh.createUvSphere(
SPHERE_RADIUS_METERS,
DEFAULT_SPHERE_ROWS,
DEFAULT_SPHERE_COLUMNS,
DEFAULT_SPHERE_VERTICAL_DEGREES,
DEFAULT_SPHERE_HORIZONTAL_DEGREES,
stereoMode);
queueEvent(() -> renderer.scene.setMesh(mesh));
public void setDefaultStereoMode(@C.StereoMode int stereoMode) {
defaultStereoMode = stereoMode;
}
/** Returns the {@link Surface} associated with this view. */
@ -169,6 +155,12 @@ public final class SphericalSurfaceView extends GLSurfaceView {
surfaceListener = listener;
}
@Override
public void onVideoFrameAboutToBeRendered(
long presentationTimeUs, long releaseTimeNs, Format format) {
setProjection(format.projectionData, format.stereoMode, releaseTimeNs);
}
@Override
public void onResume() {
super.onResume();
@ -230,6 +222,35 @@ public final class SphericalSurfaceView extends GLSurfaceView {
}
}
/**
* Sets projection data and stereo mode of the media to be played.
*
* @param projectionData Contains the projection data to be rendered.
* @param stereoMode A {@link C.StereoMode} value.
* @param timeNs When then new projection should be used.
*/
private void setProjection(
@Nullable byte[] projectionData, @C.StereoMode int stereoMode, long timeNs) {
byte[] oldProjectionData = currentProjectionData;
int oldStereoMode = currentStereoMode;
currentProjectionData = projectionData;
currentStereoMode = stereoMode == Format.NO_VALUE ? defaultStereoMode : stereoMode;
if (oldStereoMode == currentStereoMode
&& Arrays.equals(oldProjectionData, currentProjectionData)) {
return;
}
Projection projectionFromData = null;
if (currentProjectionData != null) {
projectionFromData = ProjectionDecoder.decode(currentProjectionData, currentStereoMode);
}
Projection projection =
projectionFromData != null && ProjectionRenderer.isSupported(projectionFromData)
? projectionFromData
: Projection.createEquirectangular(currentStereoMode);
queueEvent(() -> renderer.scene.setProjection(projection, timeNs));
}
/** Detects sensor events and saves them as a matrix. */
private static class PhoneOrientationListener implements SensorEventListener {
private final float[] phoneInWorldSpaceMatrix = new float[16];
@ -328,7 +349,7 @@ public final class SphericalSurfaceView extends GLSurfaceView {
private final float[] tempMatrix = new float[16];
public Renderer() {
scene = new SceneRenderer();
scene = new SceneRenderer(Projection.createEquirectangular(C.STEREO_MODE_MONO));
Matrix.setIdentityM(deviceOrientationMatrix, 0);
Matrix.setIdentityM(touchPitchMatrix, 0);
Matrix.setIdentityM(touchYawMatrix, 0);