WebVR: add angular velocity estimate to pose
The WebVR spec supports angular velocity as an optional field. We're not
currently supplying that, but we need the data for motion-to-photon latency
testing.

See also immersive-web/webxr#212 for a spec clarification request. Currently
this returns sensor-relative velocity; it may need to be transformed to a
static reference frame.

BUG=705084

Review-Url: https://codereview.chromium.org/2770353002
Cr-Commit-Position: refs/heads/master@{#462331}
klausw authored and Commit bot committed Apr 6, 2017
1 parent d8c43f8 commit 3150789
Showing 5 changed files with 150 additions and 40 deletions.
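
For reference, the estimate added in gvr_delegate.cc below is the standard
first-order approximation of angular velocity from a pair of orientation
matrices (see the Wikipedia link cited in the code). A sketch of the math,
with R(t) the rotation at time t and epsilon the small sampling offset:

    \[
    [\omega]_\times \;=\; \dot{R}\,R^{-1}
    \;\approx\; \frac{R(t+\epsilon) - R(t)}{\epsilon}\,R(t)^{\mathsf T},
    \qquad
    [\omega]_\times =
    \begin{pmatrix}
      0         & -\omega_z &  \omega_y \\
      \omega_z  &  0        & -\omega_x \\
      -\omega_y &  \omega_x &  0
    \end{pmatrix}
    \]

using R^{-1} = R^T for a rotation matrix. The component signs extracted in
GetAngularVelocityFromPoses() are flipped relative to this convention, which
appears consistent with head_mat being a head-from-start transform rather than
a start-from-head orientation.
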
13 changes: 2 additions & 11 deletions chrome/browser/android/vr_shell/non_presenting_gvr_delegate.cc
@@ -11,10 +11,6 @@
 
 namespace vr_shell {
 
-namespace {
-static constexpr int64_t kPredictionTimeWithoutVsyncNanos = 50000000;
-}  // namespace
-
 NonPresentingGvrDelegate::NonPresentingGvrDelegate(gvr_context* context)
     : task_runner_(base::ThreadTaskRunnerHandle::Get()),
       binding_(this),
@@ -141,19 +137,14 @@ void NonPresentingGvrDelegate::UpdateVSyncInterval(int64_t timebase_nanos,
 
 void NonPresentingGvrDelegate::SendVSync(base::TimeDelta time,
                                          const GetVSyncCallback& callback) {
-  gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
-  target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
-
   if (!gvr_api_) {
     callback.Run(device::mojom::VRPosePtr(nullptr), time, -1,
                  device::mojom::VRVSyncProvider::Status::SUCCESS);
     return;
   }
 
-  gvr::Mat4f head_mat = gvr_api_->ApplyNeckModel(
-      gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time), 1.0f);
-  callback.Run(GvrDelegate::VRPosePtrFromGvrPose(head_mat), time, -1,
-               device::mojom::VRVSyncProvider::Status::SUCCESS);
+  callback.Run(GvrDelegate::GetVRPosePtrWithNeckModel(gvr_api_.get(), nullptr),
+               time, -1, device::mojom::VRVSyncProvider::Status::SUCCESS);
 }
 
 void NonPresentingGvrDelegate::CreateVRDisplayInfo(
13 changes: 9 additions & 4 deletions chrome/browser/android/vr_shell/vr_controller.cc
@@ -110,8 +110,12 @@ device::GvrGamepadData VrController::GetGamepadData() {
   pad.timestamp = controller_state_->GetLastOrientationTimestamp();
   pad.touch_pos = controller_state_->GetTouchPos();
   pad.orientation = controller_state_->GetOrientation();
-  pad.accel = controller_state_->GetAccel();
-  pad.gyro = controller_state_->GetGyro();
+
+  // Use orientation to rotate acceleration/gyro into seated space.
+  gvr::Mat4f pose_mat = QuatToMatrix(pad.orientation);
+  pad.accel = MatrixVectorMul(pose_mat, controller_state_->GetAccel());
+  pad.gyro = MatrixVectorMul(pose_mat, controller_state_->GetGyro());
+
   pad.is_touching = controller_state_->IsTouching();
   pad.controller_button_pressed =
       controller_state_->GetButtonState(GVR_CONTROLLER_BUTTON_CLICK);
@@ -234,9 +238,10 @@ void VrController::Initialize(gvr_context* gvr_context) {
 
   int32_t options = gvr::ControllerApi::DefaultOptions();
 
-  // Enable non-default options - WebVR needs gyro, and since VrShell
-  // implements GvrGamepadDataProvider we need this always.
+  // Enable non-default options - WebVR needs gyro and linear acceleration, and
+  // since VrShell implements GvrGamepadDataProvider we need this always.
   options |= GVR_CONTROLLER_ENABLE_GYRO;
+  options |= GVR_CONTROLLER_ENABLE_ACCEL;
 
   CHECK(controller_api_->Init(options, gvr_context));
 
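QuatToMatrix() and MatrixVectorMul() above come from vr_shell's vr_math
helpers. A minimal standalone sketch of the same seated-space rotation, with
stand-in Vec3/Quat/Mat3 types and the standard unit-quaternion-to-matrix
formula (illustrative, not the Chromium code):

    struct Vec3 { float x, y, z; };
    struct Quat { float x, y, z, w; };  // unit quaternion, w = scalar part
    struct Mat3 { float m[3][3]; };

    // Standard unit-quaternion to rotation-matrix conversion.
    Mat3 QuatToMatrix(const Quat& q) {
      Mat3 r;
      r.m[0][0] = 1 - 2 * (q.y * q.y + q.z * q.z);
      r.m[0][1] = 2 * (q.x * q.y - q.z * q.w);
      r.m[0][2] = 2 * (q.x * q.z + q.y * q.w);
      r.m[1][0] = 2 * (q.x * q.y + q.z * q.w);
      r.m[1][1] = 1 - 2 * (q.x * q.x + q.z * q.z);
      r.m[1][2] = 2 * (q.y * q.z - q.x * q.w);
      r.m[2][0] = 2 * (q.x * q.z - q.y * q.w);
      r.m[2][1] = 2 * (q.y * q.z + q.x * q.w);
      r.m[2][2] = 1 - 2 * (q.x * q.x + q.y * q.y);
      return r;
    }

    // Rotation only; plays the role of the 3x3 part of vr_math's
    // MatrixVectorMul.
    Vec3 MatrixVectorMul(const Mat3& m, const Vec3& v) {
      return {m.m[0][0] * v.x + m.m[0][1] * v.y + m.m[0][2] * v.z,
              m.m[1][0] * v.x + m.m[1][1] * v.y + m.m[1][2] * v.z,
              m.m[2][0] * v.x + m.m[2][1] * v.y + m.m[2][2] * v.z};
    }

    // Usage, mirroring GetGamepadData(): a device-space accel/gyro vector
    // rotated by the controller orientation lands in the seated frame.
    Vec3 ToSeatedSpace(const Quat& orientation, const Vec3& device_vec) {
      return MatrixVectorMul(QuatToMatrix(orientation), device_vec);
    }
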
31 changes: 6 additions & 25 deletions chrome/browser/android/vr_shell/vr_shell_gl.cc
@@ -37,10 +37,6 @@
 namespace vr_shell {
 
 namespace {
-// TODO(mthiesse): If gvr::PlatformInfo().GetPosePredictionTime() is ever
-// exposed, use that instead (it defaults to 50ms on most platforms).
-static constexpr int64_t kPredictionTimeWithoutVsyncNanos = 50000000;
-
 static constexpr float kZNear = 0.1f;
 static constexpr float kZFar = 1000.0f;
 
@@ -808,19 +804,7 @@ void VrShellGl::DrawFrame(int16_t frame_index) {
                   "kPoseRingBufferSize must be a power of 2");
     head_pose = webvr_head_pose_[frame_index % kPoseRingBufferSize];
   } else {
-    gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
-    target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
-    head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
-  }
-
-  gvr::Vec3f position = GetTranslation(head_pose);
-  if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) {
-    // This appears to be a 3DOF pose without a neck model. Add one.
-    // The head pose has redundant data. Assume we're only using the
-    // object_from_reference_matrix, we're not updating position_external.
-    // TODO: Not sure what object_from_reference_matrix is. The new api removed
-    // it. For now, removing it seems working fine.
-    gvr_api_->ApplyNeckModel(head_pose, 1.0f);
+    head_pose = device::GvrDelegate::GetGvrPoseWithNeckModel(gvr_api_.get());
   }
 
   // Update the render position of all UI elements (including desktop).
@@ -1278,17 +1262,14 @@ void VrShellGl::SendVSync(base::TimeDelta time,
 
   TRACE_EVENT1("input", "VrShellGl::SendVSync", "frame", frame_index);
 
-  gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
-  target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
-
-  gvr::Mat4f head_mat =
-      gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
-  head_mat = gvr_api_->ApplyNeckModel(head_mat, 1.0f);
+  gvr::Mat4f head_mat;
+  device::mojom::VRPosePtr pose =
+      device::GvrDelegate::GetVRPosePtrWithNeckModel(gvr_api_.get(), &head_mat);
 
   webvr_head_pose_[frame_index % kPoseRingBufferSize] = head_mat;
 
-  callback.Run(device::GvrDelegate::VRPosePtrFromGvrPose(head_mat), time,
-               frame_index, device::mojom::VRVSyncProvider::Status::SUCCESS);
+  callback.Run(std::move(pose), time, frame_index,
+               device::mojom::VRVSyncProvider::Status::SUCCESS);
 }
 
 void VrShellGl::CreateVRDisplayInfo(
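DrawFrame() reads back the pose that SendVSync() stored for the same
frame_index, so the compositor renders with exactly the pose that was sent to
the page. A minimal sketch of that ring-buffer pattern, assuming non-negative
frame indices (stand-in types and names, not the Chromium code):

    #include <array>
    #include <cstdint>

    struct Mat4f { float m[4][4]; };

    // Power-of-2 size keeps the modulo cheap; the assert enforces it.
    constexpr unsigned kPoseRingBufferSize = 8;
    static_assert((kPoseRingBufferSize & (kPoseRingBufferSize - 1)) == 0,
                  "kPoseRingBufferSize must be a power of 2");

    std::array<Mat4f, kPoseRingBufferSize> webvr_head_pose;

    // SendVSync side: remember the pose handed to the page for this frame.
    void StorePose(uint16_t frame_index, const Mat4f& pose) {
      webvr_head_pose[frame_index % kPoseRingBufferSize] = pose;
    }

    // DrawFrame side: fetch the same pose when the frame comes back.
    Mat4f PoseForFrame(uint16_t frame_index) {
      return webvr_head_pose[frame_index % kPoseRingBufferSize];
    }
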
130 changes: 130 additions & 0 deletions device/vr/android/gvr/gvr_delegate.cc
@@ -18,6 +18,93 @@ namespace {
 // twice before handing off to GVR. For comparison, the polyfill
 // uses approximately 0.55 on a Pixel XL.
 static constexpr float kWebVrRecommendedResolutionScale = 0.5;
+
+// TODO(mthiesse): If gvr::PlatformInfo().GetPosePredictionTime() is ever
+// exposed, use that instead (it defaults to 50ms on most platforms).
+static constexpr int64_t kPredictionTimeWithoutVsyncNanos = 50000000;
+
+// Time offset used for calculating angular velocity from a pair of predicted
+// poses. The precise value shouldn't matter as long as it's nonzero and much
+// less than a frame.
+static constexpr int64_t kAngularVelocityEpsilonNanos = 1000000;
+
+// Matrix math copied from vr_shell's vr_math.cc, can't use that here
+// due to dependency ordering. TODO(mthiesse): move the vr_math code
+// to this directory so that both locations can use it.
+
+// Rotation only, ignore translation components.
+gvr::Vec3f MatrixVectorRotate(const gvr::Mat4f& m, const gvr::Vec3f& v) {
+  gvr::Vec3f res;
+  res.x = m.m[0][0] * v.x + m.m[0][1] * v.y + m.m[0][2] * v.z;
+  res.y = m.m[1][0] * v.x + m.m[1][1] * v.y + m.m[1][2] * v.z;
+  res.z = m.m[2][0] * v.x + m.m[2][1] * v.y + m.m[2][2] * v.z;
+  return res;
+}
+
+gvr::Mat4f MatrixMul(const gvr::Mat4f& matrix1, const gvr::Mat4f& matrix2) {
+  gvr::Mat4f result;
+  for (int i = 0; i < 4; ++i) {
+    for (int j = 0; j < 4; ++j) {
+      result.m[i][j] = 0.0f;
+      for (int k = 0; k < 4; ++k) {
+        result.m[i][j] += matrix1.m[i][k] * matrix2.m[k][j];
+      }
+    }
+  }
+  return result;
+}
+
+gvr::Vec3f GetAngularVelocityFromPoses(gvr::Mat4f head_mat,
+                                       gvr::Mat4f head_mat_2,
+                                       double epsilon_seconds) {
+  // The angular velocity is a 3-element vector pointing along the rotation
+  // axis with magnitude equal to rotation speed in radians/second, expressed
+  // in the seated frame of reference.
+  //
+  // The 1.1 spec isn't very clear on details, clarification requested in
+  // https://github.com/w3c/webvr/issues/212 . For now, assuming that we
+  // want a vector in the sitting reference frame.
+  //
+  // Assuming that pose prediction is simply based on adding a time * angular
+  // velocity rotation to the pose, we can approximate the angular velocity
+  // from the difference between two successive poses. This is a first order
+  // estimate that assumes small enough rotations so that we can do linear
+  // approximation.
+  //
+  // See:
+  // https://en.wikipedia.org/wiki/Angular_velocity#Calculation_from_the_orientation_matrix
+
+  gvr::Mat4f delta_mat;
+  gvr::Mat4f inverse_head_mat;
+  // Calculate difference matrix, and inverse head matrix rotation.
+  // For the inverse rotation, just transpose the 3x3 subsection.
+  //
+  // Assume that epsilon is nonzero since it's based on a compile-time constant
+  // provided by the caller.
+  for (int j = 0; j < 3; ++j) {
+    for (int i = 0; i < 3; ++i) {
+      delta_mat.m[j][i] =
+          (head_mat_2.m[j][i] - head_mat.m[j][i]) / epsilon_seconds;
+      inverse_head_mat.m[j][i] = head_mat.m[i][j];
+    }
+    delta_mat.m[j][3] = delta_mat.m[3][j] = 0.0;
+    inverse_head_mat.m[j][3] = inverse_head_mat.m[3][j] = 0.0;
+  }
+  delta_mat.m[3][3] = 1.0;
+  inverse_head_mat.m[3][3] = 1.0;
+  gvr::Mat4f omega_mat = device::MatrixMul(delta_mat, inverse_head_mat);
+  gvr::Vec3f omega_vec;
+  omega_vec.x = -omega_mat.m[2][1];
+  omega_vec.y = omega_mat.m[2][0];
+  omega_vec.z = -omega_mat.m[1][0];
+
+  // Rotate by inverse head matrix to bring into seated space.
+  gvr::Vec3f angular_velocity =
+      device::MatrixVectorRotate(inverse_head_mat, omega_vec);
+
+  return angular_velocity;
+}
+
 }  // namespace
 
 /* static */
@@ -51,6 +138,49 @@ mojom::VRPosePtr GvrDelegate::VRPosePtrFromGvrPose(gvr::Mat4f head_mat) {
   return pose;
 }
 
+/* static */
+gvr::Mat4f GvrDelegate::GetGvrPoseWithNeckModel(gvr::GvrApi* gvr_api) {
+  gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
+  target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
+
+  gvr::Mat4f head_mat = gvr_api->ApplyNeckModel(
+      gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time), 1.0f);
+
+  return head_mat;
+}
+
+/* static */
+mojom::VRPosePtr GvrDelegate::GetVRPosePtrWithNeckModel(
+    gvr::GvrApi* gvr_api,
+    gvr::Mat4f* head_mat_out) {
+  gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
+  target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
+
+  gvr::Mat4f head_mat = gvr_api->ApplyNeckModel(
+      gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time), 1.0f);
+
+  if (head_mat_out)
+    *head_mat_out = head_mat;
+
+  mojom::VRPosePtr pose = GvrDelegate::VRPosePtrFromGvrPose(head_mat);
+
+  // Get a second pose a bit later to calculate angular velocity.
+  target_time.monotonic_system_time_nanos += kAngularVelocityEpsilonNanos;
+  gvr::Mat4f head_mat_2 =
+      gvr_api->GetHeadSpaceFromStartSpaceRotation(target_time);
+
+  // Add headset angular velocity to the pose.
+  pose->angularVelocity.emplace(3);
+  double epsilon_seconds = kAngularVelocityEpsilonNanos * 1e-9;
+  gvr::Vec3f angular_velocity =
+      GetAngularVelocityFromPoses(head_mat, head_mat_2, epsilon_seconds);
+  pose->angularVelocity.value()[0] = angular_velocity.x;
+  pose->angularVelocity.value()[1] = angular_velocity.y;
+  pose->angularVelocity.value()[2] = angular_velocity.z;
+
+  return pose;
+}
+
 /* static */
 gvr::Sizei GvrDelegate::GetRecommendedWebVrSize(gvr::GvrApi* gvr_api) {
   // Pick a reasonable default size for the WebVR transfer surface
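A quick standalone check of the finite-difference scheme used by
GetAngularVelocityFromPoses(): a sketch with stand-in types and the textbook
sign convention (the patch negates some components to match its head-from-start
matrices). A rotation about +z at 1 rad/s, sampled epsilon seconds apart,
should recover omega close to (0, 0, 1):

    #include <cmath>
    #include <cstdio>

    struct Mat4f { double m[4][4]; };  // stand-in for gvr::Mat4f
    struct Vec3f { double x, y, z; };

    // Rotation about +z by angle a, as a 4x4 matrix.
    Mat4f RotZ(double a) {
      Mat4f r = {};
      r.m[0][0] = std::cos(a); r.m[0][1] = -std::sin(a);
      r.m[1][0] = std::sin(a); r.m[1][1] = std::cos(a);
      r.m[2][2] = 1.0;
      r.m[3][3] = 1.0;
      return r;
    }

    // Same scheme as GetAngularVelocityFromPoses: omega_mat = dR/dt * R^T,
    // then read the angular velocity out of the (approximately)
    // skew-symmetric result.
    Vec3f AngularVelocity(const Mat4f& r1, const Mat4f& r2, double eps) {
      double omega[3][3];
      for (int i = 0; i < 3; ++i) {
        for (int j = 0; j < 3; ++j) {
          omega[i][j] = 0.0;
          for (int k = 0; k < 3; ++k)
            omega[i][j] += (r2.m[i][k] - r1.m[i][k]) / eps * r1.m[j][k];
        }
      }
      return {omega[2][1], omega[0][2], omega[1][0]};
    }

    int main() {
      const double eps = 1e-3;  // analogous to kAngularVelocityEpsilonNanos
      Vec3f w = AngularVelocity(RotZ(0.0), RotZ(eps), eps);
      std::printf("omega = (%f, %f, %f)\n", w.x, w.y, w.z);  // ~(0, 0, 1)
      return 0;
    }
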
3 changes: 3 additions & 0 deletions device/vr/android/gvr/gvr_delegate.h
@@ -17,6 +17,9 @@ namespace device {
 class DEVICE_VR_EXPORT GvrDelegate {
  public:
   static mojom::VRPosePtr VRPosePtrFromGvrPose(gvr::Mat4f head_mat);
+  static gvr::Mat4f GetGvrPoseWithNeckModel(gvr::GvrApi* gvr_api);
+  static mojom::VRPosePtr GetVRPosePtrWithNeckModel(gvr::GvrApi* gvr_api,
+                                                    gvr::Mat4f* head_mat_out);
   static gvr::Sizei GetRecommendedWebVrSize(gvr::GvrApi* gvr_api);
   static mojom::VRDisplayInfoPtr CreateVRDisplayInfo(
       gvr::GvrApi* gvr_api,
