Fix TouchTrackerTest

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=206304655
This commit is contained in:
eguven 2018-07-27 05:27:16 -07:00 committed by Oliver Woodman
parent 4eee474555
commit 30fecb71d2
4 changed files with 123 additions and 416 deletions

View file

@ -34,9 +34,7 @@ import android.support.annotation.Nullable;
import android.support.annotation.UiThread;
import android.util.AttributeSet;
import android.view.Display;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.View;
import android.view.WindowManager;
import com.google.android.exoplayer2.ui.spherical.Mesh.EyeType;
import com.google.android.exoplayer2.util.Assertions;
@ -87,14 +85,10 @@ public final class SphericalSurfaceView extends GLSurfaceView {
private static final float Z_NEAR = .1f;
private static final float Z_FAR = 100;
// Arbitrary touch speed number. This should be tweaked so the scene smoothly follows the
// finger or derived from DisplayMetrics.
// TODO Calculate this depending on surface size and field of view.
private static final float PX_PER_DEGREES = 25;
// Touch input won't change the pitch beyond +/- 45 degrees. This reduces awkward situations
// where the touch-based pitch and gyro-based pitch interact badly near the poles.
private static final float MAX_PITCH_DEGREES = 45;
private static final float UPRIGHT_ROLL = (float) Math.PI;
/*package*/ static final float UPRIGHT_ROLL = (float) Math.PI;
private final SensorManager sensorManager;
private final @Nullable Sensor orientationSensor;
@ -126,7 +120,7 @@ public final class SphericalSurfaceView extends GLSurfaceView {
renderer = new Renderer();
TouchTracker touchTracker = new TouchTracker(renderer);
TouchTracker touchTracker = new TouchTracker(renderer, PX_PER_DEGREES);
WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
Display display = Assertions.checkNotNull(windowManager).getDefaultDisplay();
phoneOrientationListener = new PhoneOrientationListener(display, touchTracker, renderer);
@ -291,97 +285,12 @@ public final class SphericalSurfaceView extends GLSurfaceView {
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
}
/**
* Basic touch input system.
*
* <p>Mixing touch input and gyro input results in a complicated UI so this should be used
* carefully. This touch system implements a basic (X, Y) -> (yaw, pitch) transform. This works
* for basic UI but fails in edge cases where the user tries to drag scene up or down. There is no
* good UX solution for this. The least bad solution is to disable pitch manipulation and only let
* the user adjust yaw. This example tries to limit the awkwardness by restricting pitch
* manipulation to +/- 45 degrees.
*
* <p>It is also important to get the order of operations correct. To match what users expect,
* touch interaction manipulates the scene by rotating the world by the yaw offset and tilting the
* camera by the pitch offset. If the order of operations is incorrect, the sensors & touch
* rotations will have strange interactions. The roll of the phone is also tracked so that the x &
* y are correctly mapped to yaw & pitch no matter how the user holds their phone.
*
* <p>This class doesn't handle any scrolling inertia but Android's
* com.google.vr.sdk.widgets.common.TouchTracker.FlingGestureListener can be used with this code
* for a nicer UI. An even more advanced UI would reproject the user's touch point into 3D and
* drag the Mesh as the user moves their finger. However, that requires quaternion interpolation
* and is beyond the scope of this sample.
*/
// @VisibleForTesting
/*package*/ static class TouchTracker implements OnTouchListener {
// With every touch event, update the accumulated degrees offset by the new pixel amount.
private final PointF previousTouchPointPx = new PointF();
private final PointF accumulatedTouchOffsetDegrees = new PointF();
// The conversion from touch to yaw & pitch requires compensating for device roll. This is set
// on the sensor thread and read on the UI thread.
private volatile float roll;
private final Renderer renderer;
public TouchTracker(Renderer renderer) {
this.renderer = renderer;
roll = UPRIGHT_ROLL;
}
/**
* Converts ACTION_MOVE events to pitch & yaw events while compensating for device roll.
*
* @return true if we handled the event
*/
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
// Initialize drag gesture.
previousTouchPointPx.set(event.getX(), event.getY());
return true;
case MotionEvent.ACTION_MOVE:
// Calculate the touch delta in screen space.
float touchX = (event.getX() - previousTouchPointPx.x) / PX_PER_DEGREES;
float touchY = (event.getY() - previousTouchPointPx.y) / PX_PER_DEGREES;
previousTouchPointPx.set(event.getX(), event.getY());
float r = roll; // Copy volatile state.
float cr = (float) Math.cos(r);
float sr = (float) Math.sin(r);
// To convert from screen space to the 3D space, we need to adjust the drag vector based
// on the roll of the phone. This is standard rotationMatrix(roll) * vector math but has
// an inverted y-axis due to the screen-space coordinates vs GL coordinates.
// Handle yaw.
accumulatedTouchOffsetDegrees.x -= cr * touchX - sr * touchY;
// Handle pitch and limit it to 45 degrees.
accumulatedTouchOffsetDegrees.y += sr * touchX + cr * touchY;
accumulatedTouchOffsetDegrees.y =
Math.max(
-MAX_PITCH_DEGREES, Math.min(MAX_PITCH_DEGREES, accumulatedTouchOffsetDegrees.y));
renderer.setPitchOffset(accumulatedTouchOffsetDegrees.y);
renderer.setYawOffset(accumulatedTouchOffsetDegrees.x);
return true;
default:
return false;
}
}
@BinderThread
public void setRoll(float roll) {
// We compensate for roll by rotating in the opposite direction.
this.roll = -roll;
}
}
/**
* Standard GL Renderer implementation. The notable code is the matrix multiplication in
* onDrawFrame and updatePitchMatrix.
*/
// @VisibleForTesting
/*package*/ class Renderer implements GLSurfaceView.Renderer {
/*package*/ class Renderer implements GLSurfaceView.Renderer, TouchTracker.Listener {
private final SceneRenderer scene;
private final float[] projectionMatrix = new float[16];
@ -464,17 +373,12 @@ public final class SphericalSurfaceView extends GLSurfaceView {
0);
}
/** Set the pitch offset matrix. */
@Override
@UiThread
public synchronized void setPitchOffset(float pitchDegrees) {
touchPitch = pitchDegrees;
public synchronized void onScrollChange(PointF scrollOffsetDegrees) {
touchPitch = scrollOffsetDegrees.y;
updatePitchMatrix();
}
/** Set the yaw offset matrix. */
@UiThread
public synchronized void setYawOffset(float yawDegrees) {
Matrix.setRotateM(touchYawMatrix, 0, -yawDegrees, 0, 1, 0);
Matrix.setRotateM(touchYawMatrix, 0, -scrollOffsetDegrees.x, 0, 1, 0);
}
private float calculateFieldOfViewInYDirection(float aspect) {

View file

@ -0,0 +1,115 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import android.graphics.PointF;
import android.support.annotation.BinderThread;
import android.view.MotionEvent;
import android.view.View;
/**
 * Basic touch input system.
 *
 * <p>Mixing touch input and gyro input results in a complicated UI so this should be used
 * carefully. This touch system implements a basic (X, Y) -> (yaw, pitch) transform. This works for
 * basic UI but fails in edge cases where the user tries to drag scene up or down. There is no good
 * UX solution for this. The least bad solution is to disable pitch manipulation and only let the
 * user adjust yaw. This example tries to limit the awkwardness by restricting pitch manipulation to
 * +/- 45 degrees.
 *
 * <p>It is also important to get the order of operations correct. To match what users expect, touch
 * interaction manipulates the scene by rotating the world by the yaw offset and tilting the camera
 * by the pitch offset. If the order of operations is incorrect, the sensors & touch rotations will
 * have strange interactions. The roll of the phone is also tracked so that the x & y are correctly
 * mapped to yaw & pitch no matter how the user holds their phone.
 *
 * <p>This class doesn't handle any scrolling inertia but Android's
 * com.google.vr.sdk.widgets.common.TouchTracker.FlingGestureListener can be used with this code for
 * a nicer UI. An even more advanced UI would reproject the user's touch point into 3D and drag the
 * Mesh as the user moves their finger. However, that requires quaternion interpolation.
 */
// @VisibleForTesting
/*package*/ class TouchTracker implements View.OnTouchListener {

  /** Receives the accumulated scroll offset, in degrees, after each touch move. */
  /*package*/ interface Listener {
    void onScrollChange(PointF scrollOffsetDegrees);
  }

  // Touch input won't change the pitch beyond +/- 45 degrees. This reduces awkward situations
  // where the touch-based pitch and gyro-based pitch interact badly near the poles.
  /*package*/ static final float MAX_PITCH_DEGREES = 45;

  // Location of the last DOWN/MOVE event; deltas against it drive the accumulated offset.
  private final PointF previousTouchPointPx = new PointF();
  // Running (yaw, pitch) offset in degrees; handed to the listener after every MOVE.
  private final PointF accumulatedTouchOffsetDegrees = new PointF();

  private final Listener listener;
  private final float pxPerDegrees;
  // The conversion from touch to yaw & pitch requires compensating for device roll. This is set
  // on the sensor thread and read on the UI thread.
  private volatile float roll;

  public TouchTracker(Listener listener, float pxPerDegrees) {
    this.listener = listener;
    this.pxPerDegrees = pxPerDegrees;
    roll = SphericalSurfaceView.UPRIGHT_ROLL;
  }

  /**
   * Converts ACTION_MOVE events to pitch & yaw events while compensating for device roll.
   *
   * @return true if we handled the event
   */
  @Override
  public boolean onTouch(View v, MotionEvent event) {
    int action = event.getAction();
    if (action == MotionEvent.ACTION_DOWN) {
      // Start a new drag gesture from this point.
      previousTouchPointPx.set(event.getX(), event.getY());
      return true;
    }
    if (action != MotionEvent.ACTION_MOVE) {
      // All other actions (UP, CANCEL, ...) are ignored.
      return false;
    }
    // Touch delta converted from screen pixels to degrees.
    float deltaXDegrees = (event.getX() - previousTouchPointPx.x) / pxPerDegrees;
    float deltaYDegrees = (event.getY() - previousTouchPointPx.y) / pxPerDegrees;
    previousTouchPointPx.set(event.getX(), event.getY());

    float rollRadians = roll; // Snapshot the volatile field once.
    float cosRoll = (float) Math.cos(rollRadians);
    float sinRoll = (float) Math.sin(rollRadians);
    // To convert from screen space to the 3D space, we need to adjust the drag vector based
    // on the roll of the phone. This is standard rotationMatrix(roll) * vector math but has
    // an inverted y-axis due to the screen-space coordinates vs GL coordinates.
    // Yaw accumulates unbounded.
    accumulatedTouchOffsetDegrees.x -= cosRoll * deltaXDegrees - sinRoll * deltaYDegrees;
    // Pitch accumulates but is clamped to +/- MAX_PITCH_DEGREES.
    float newPitch = accumulatedTouchOffsetDegrees.y + sinRoll * deltaXDegrees + cosRoll * deltaYDegrees;
    accumulatedTouchOffsetDegrees.y =
        Math.max(-MAX_PITCH_DEGREES, Math.min(MAX_PITCH_DEGREES, newPitch));

    listener.onScrollChange(accumulatedTouchOffsetDegrees);
    return true;
  }

  @BinderThread
  public void setRoll(float roll) {
    // We compensate for roll by rotating in the opposite direction.
    this.roll = -roll;
  }
}

View file

@ -1,156 +0,0 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
/** Tests for {@link Mesh}. */
@RunWith(RobolectricTestRunner.class)
public class MeshTest {
  /** Tolerance for floating point comparisons. */
  private static final float EPSILON = .00001f;
  // This is a copy of Mesh.COORDS_PER_VERTEX which is private.
  private static final int COORDS_PER_VERTEX = 7;
  // Default 360 sphere.
  private static final float RADIUS = 1;
  private static final int LATITUDES = 12;
  private static final int LONGITUDES = 24;
  private static final float VERTICAL_FOV_DEGREES = 180;
  private static final float HORIZONTAL_FOV_DEGREES = 360;

  /** Every generated vertex should lie on the sphere of the requested radius. */
  @Test
  public void testSphericalMesh() throws Exception {
    // Only the first param is important in this test.
    float[] data =
        Mesh.createUvSphereVertexData(
            RADIUS,
            LATITUDES,
            LONGITUDES,
            VERTICAL_FOV_DEGREES,
            HORIZONTAL_FOV_DEGREES,
            Mesh.MEDIA_STEREO_TOP_BOTTOM);
    // There should be more vertices than quads, and the data should be whole vertices.
    assertThat(data.length).isGreaterThan(LATITUDES * LONGITUDES * COORDS_PER_VERTEX);
    assertThat(data.length % COORDS_PER_VERTEX).isEqualTo(0);
    for (int i = 0; i < data.length / COORDS_PER_VERTEX; ++i) {
      float x = data[i * COORDS_PER_VERTEX + 0];
      float y = data[i * COORDS_PER_VERTEX + 1];
      float z = data[i * COORDS_PER_VERTEX + 2];
      // |(x, y, z)| == RADIUS for a point on the sphere.
      assertThat(Math.sqrt(x * x + y * y + z * z)).isWithin(EPSILON).of(RADIUS);
    }
  }

  /**
   * Checks the per-eye texture coordinates (indices 3-6 of each vertex) for monoscopic, left-right
   * stereo, and top-bottom stereo layouts.
   */
  @Test
  public void testMeshTextureCoordinates() throws Exception {
    // 360 mono video.
    float[] data =
        Mesh.createUvSphereVertexData(
            RADIUS,
            LATITUDES,
            LONGITUDES,
            VERTICAL_FOV_DEGREES,
            HORIZONTAL_FOV_DEGREES,
            Mesh.MEDIA_MONOSCOPIC);
    // There should be more vertices than quads.
    assertThat(data.length).isGreaterThan(LATITUDES * LONGITUDES * COORDS_PER_VERTEX);
    assertThat(data.length % COORDS_PER_VERTEX).isEqualTo(0);
    for (int i = 0; i < data.length; i += COORDS_PER_VERTEX) {
      // For monoscopic meshes, the (3, 4) and (5, 6) tex coords in each vertex should be the same.
      assertThat(data[i + 5]).isWithin(EPSILON).of(data[i + 3]);
      assertThat(data[i + 6]).isWithin(EPSILON).of(data[i + 4]);
    }

    // Hemispherical stereo where longitudes := latitudes. This is not exactly Wally format, but
    // it's close.
    data =
        Mesh.createUvSphereVertexData(
            RADIUS,
            LATITUDES,
            LATITUDES,
            VERTICAL_FOV_DEGREES,
            VERTICAL_FOV_DEGREES,
            Mesh.MEDIA_STEREO_LEFT_RIGHT);
    assertThat(data.length).isGreaterThan(LATITUDES * LATITUDES * COORDS_PER_VERTEX);
    assertThat(data.length % COORDS_PER_VERTEX).isEqualTo(0);
    for (int i = 0; i < data.length; i += COORDS_PER_VERTEX) {
      // U coordinates should be on the left & right halves of the texture.
      assertThat(data[i + 3]).isAtMost(.5f);
      assertThat(data[i + 5]).isAtLeast(.5f);
      // V coordinates should be the same.
      assertThat(data[i + 6]).isWithin(EPSILON).of(data[i + 4]);
    }

    // Flat stereo.
    data =
        Mesh.createUvSphereVertexData(
            RADIUS,
            1,
            1, // Single quad.
            30,
            60, // Approximate "cinematic" screen.
            Mesh.MEDIA_STEREO_TOP_BOTTOM);
    assertThat(data.length % COORDS_PER_VERTEX).isEqualTo(0);
    for (int i = 0; i < data.length; i += COORDS_PER_VERTEX) {
      // U coordinates should be the same
      assertThat(data[i + 5]).isWithin(EPSILON).of(data[i + 3]);
      // V coordinates should be on the top & bottom halves of the texture.
      assertThat(data[i + 4]).isAtMost(.5f);
      assertThat(data[i + 6]).isAtLeast(.5f);
    }
  }

  /** Each invalid argument (zero or out-of-range) should be rejected individually. */
  @Test
  public void testArgumentValidation() {
    checkIllegalArgumentException(0, 1, 1, 1, 1);
    checkIllegalArgumentException(1, 0, 1, 1, 1);
    checkIllegalArgumentException(1, 1, 0, 1, 1);
    checkIllegalArgumentException(1, 1, 1, 0, 1);
    checkIllegalArgumentException(1, 1, 1, 181, 1);
    checkIllegalArgumentException(1, 1, 1, 1, 0);
    checkIllegalArgumentException(1, 1, 1, 1, 361);
  }

  /** Asserts that {@link Mesh#createUvSphereVertexData} rejects the given arguments. */
  private void checkIllegalArgumentException(
      float radius,
      int latitudes,
      int longitudes,
      float verticalFovDegrees,
      float horizontalFovDegrees) {
    try {
      Mesh.createUvSphereVertexData(
          radius,
          latitudes,
          longitudes,
          verticalFovDegrees,
          horizontalFovDegrees,
          Mesh.MEDIA_MONOSCOPIC);
      fail();
    } catch (IllegalArgumentException e) {
      // Do nothing. Expected.
    }
  }
}

View file

@ -1,156 +0,0 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.ui.spherical;
import static com.google.common.truth.Truth.assertThat;
import android.view.MotionEvent;
import com.google.android.exoplayer2.ui.spherical.SphericalSurfaceView.TouchTracker;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
/** Tests the interaction between the View's input (TouchTracker) and output (Renderer). */
@RunWith(RobolectricTestRunner.class)
public class SphericalSurfaceViewTouchTrackerTest {
// Tolerance for floating point comparisons of yaw/pitch, in degrees.
private static final float EPSILON = 0.00001f;
// Length of a simulated drag gesture, in pixels.
private static final int SWIPE_PX = 100;
// Records the yaw/pitch that TouchTracker pushes into the Renderer so the
// tests can assert on them without any GL machinery.
private static class MockRenderer extends SphericalSurfaceView.Renderer {
private float yaw;
private float pitch;
public MockRenderer() {
// NOTE(review): passes null to the real Renderer constructor — assumes the
// superclass tolerates a null argument in a test context; confirm against Renderer.
super(null);
}
@Override
public synchronized void setPitchOffset(float pitch) {
this.pitch = pitch;
}
@Override
public synchronized void setYawOffset(float yaw) {
this.yaw = yaw;
}
};
private final MockRenderer mockRenderer = new MockRenderer();
private TouchTracker tracker;
// Simulates a complete down -> move -> up drag from (x0, y0) to (x1, y1).
private static void swipe(TouchTracker tracker, float x0, float y0, float x1, float y1) {
tracker.onTouch(null, MotionEvent.obtain(0, 0, MotionEvent.ACTION_DOWN, x0, y0, 0));
tracker.onTouch(null, MotionEvent.obtain(0, 0, MotionEvent.ACTION_MOVE, x1, y1, 0));
tracker.onTouch(null, MotionEvent.obtain(0, 0, MotionEvent.ACTION_UP, x1, y1, 0));
}
@Before
public void setUp() {
// NOTE(review): uses the single-argument TouchTracker constructor and, below,
// TouchTracker.PX_PER_DEGREES / MAX_PITCH_DEGREES — this matches the old nested
// SphericalSurfaceView.TouchTracker API; verify against the current class.
tracker = new TouchTracker(mockRenderer);
}
@Test
public void testTap() {
// Tap is a noop.
swipe(tracker, 0, 0, 0, 0);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(0);
}
@Test
public void testBasicYaw() {
// Dragging right by SWIPE_PX decreases yaw by SWIPE_PX / PX_PER_DEGREES.
swipe(tracker, 0, 0, SWIPE_PX, 0);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-SWIPE_PX / TouchTracker.PX_PER_DEGREES);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(0);
}
@Test
public void testBigYaw() {
// Yaw is unclamped, so a large drag accumulates fully (unlike pitch below).
swipe(tracker, 0, 0, -10 * SWIPE_PX, 0);
assertThat(mockRenderer.yaw).isEqualTo(10 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(0);
}
@Test
public void testYawUnaffectedByPitch() {
// A vertical drag should leave yaw untouched; a diagonal drag should only add its x component.
swipe(tracker, 0, 0, 0, SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
swipe(tracker, 0, 0, SWIPE_PX, SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
@Test
public void testBasicPitch() {
// Dragging down by SWIPE_PX increases pitch by SWIPE_PX / PX_PER_DEGREES.
swipe(tracker, 0, 0, 0, SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
@Test
public void testPitchClipped() {
// Big reverse pitch should be clipped.
swipe(tracker, 0, 0, 0, -20 * SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
assertThat(mockRenderer.pitch).isEqualTo(-TouchTracker.MAX_PITCH_DEGREES);
// Big forward pitch should be clipped.
swipe(tracker, 0, 0, 0, 50 * SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(0);
assertThat(mockRenderer.pitch).isEqualTo(TouchTracker.MAX_PITCH_DEGREES);
}
@Test
public void testWithRoll90() {
// With the device rolled 90 degrees, screen x & y swap roles.
tracker.setRoll((float) Math.toRadians(90));
// Y-axis should now control yaw.
swipe(tracker, 0, 0, 0, 2 * SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-2 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
// X-axis should now control reverse pitch.
swipe(tracker, 0, 0, -3 * SWIPE_PX, 0);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(3 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
@Test
public void testWithRoll180() {
// Upside down: both axes invert.
tracker.setRoll((float) Math.toRadians(180));
// X-axis should now control reverse yaw.
swipe(tracker, 0, 0, -2 * SWIPE_PX, 0);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-2 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
// Y-axis should now control reverse pitch.
swipe(tracker, 0, 0, 0, -3 * SWIPE_PX);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(3 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
@Test
public void testWithRoll270() {
// Rolled the other way: axes swap with opposite signs from the 90 degree case.
tracker.setRoll((float) Math.toRadians(270));
// Y-axis should now control reverse yaw.
swipe(tracker, 0, 0, 0, -2 * SWIPE_PX);
assertThat(mockRenderer.yaw).isWithin(EPSILON).of(-2 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
// X-axis should now control pitch.
swipe(tracker, 0, 0, 3 * SWIPE_PX, 0);
assertThat(mockRenderer.pitch).isWithin(EPSILON).of(3 * SWIPE_PX / TouchTracker.PX_PER_DEGREES);
}
}