initial commit

Jo 2025-01-07 02:06:59 +01:00
parent 6715289efe
commit 788c3389af
37645 changed files with 2526849 additions and 80 deletions

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7f3098a431eba6a4abbb32c7e344d73b
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,641 @@
// Copyright 2016 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Modified by Unity from original:
// https://github.com/googlevr/gvr-unity-sdk/blob/master/Assets/GoogleVR/Scripts/Controller/ArmModel/GvrArmModel.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
#if ENABLE_VR || ENABLE_AR
using UnityEngine.SpatialTracking;
using UnityEngine.Experimental.XR.Interaction;
namespace UnityEngine.XR.LegacyInputHelpers
{
public class ArmModel : BasePoseProvider
{
/// <summary>Gets the Pose value from the calculated arm model. As the model returns both position and rotation in all cases, both flags are set on a successful return.</summary>
public override PoseDataFlags GetPoseFromProvider(out Pose output)
{
if (OnControllerInputUpdated())
{
output = finalPose;
return PoseDataFlags.Position | PoseDataFlags.Rotation;
}
output = Pose.identity;
return PoseDataFlags.NoData;
}
Pose m_FinalPose;
/// <summary>
/// The pose which represents the final tracking result of the arm model.
/// </summary>
public Pose finalPose
{
get { return m_FinalPose; }
set { m_FinalPose = value; }
}
[SerializeField]
XRNode m_PoseSource = XRNode.LeftHand;
/// <summary>
/// The pose to use as the input 3DOF position.
/// </summary>
public XRNode poseSource
{
get { return m_PoseSource; }
set { m_PoseSource = value; }
}
[SerializeField]
XRNode m_HeadPoseSource = XRNode.CenterEye;
/// <summary>
/// The XRNode which is used as the source of the user's head pose
/// </summary>
public XRNode headGameObject
{
get { return m_HeadPoseSource; }
set { m_HeadPoseSource = value; }
}
/// Standard implementation for a mathematical model to make the virtual controller approximate the
/// physical location of the Daydream controller.
[SerializeField]
Vector3 m_ElbowRestPosition = DEFAULT_ELBOW_REST_POSITION;
/// <summary>
/// Position of the elbow joint relative to the head before the arm model is applied.
/// </summary>
public Vector3 elbowRestPosition
{
get { return m_ElbowRestPosition; }
set { m_ElbowRestPosition = value; }
}
[SerializeField]
Vector3 m_WristRestPosition = DEFAULT_WRIST_REST_POSITION;
/// <summary>
/// Position of the wrist joint relative to the elbow before the arm model is applied.
/// </summary>
public Vector3 wristRestPosition
{
get { return m_WristRestPosition; }
set { m_WristRestPosition = value; }
}
[SerializeField]
Vector3 m_ControllerRestPosition = DEFAULT_CONTROLLER_REST_POSITION;
/// <summary>
/// Position of the controller joint relative to the wrist before the arm model is applied.
/// </summary>
public Vector3 controllerRestPosition
{
get { return m_ControllerRestPosition; }
set { m_ControllerRestPosition = value; }
}
[SerializeField]
Vector3 m_ArmExtensionOffset = DEFAULT_ARM_EXTENSION_OFFSET;
/// <summary>
/// Offset applied to the elbow position as the controller is rotated upwards.
/// </summary>
public Vector3 armExtensionOffset
{
get { return m_ArmExtensionOffset; }
set { m_ArmExtensionOffset = value; }
}
[Range(0.0f, 1.0f)]
[SerializeField]
float m_ElbowBendRatio = DEFAULT_ELBOW_BEND_RATIO;
/// <summary>
/// Ratio of the controller's rotation to apply to the rotation of the elbow.
/// The remaining rotation is applied to the wrist's rotation.
/// </summary>
public float elbowBendRatio
{
get { return m_ElbowBendRatio; }
set { m_ElbowBendRatio = value; }
}
[SerializeField]
bool m_IsLockedToNeck = true;
/// <summary>
/// If true, the root of the pose is locked to the local position of the player's neck.
/// </summary>
public bool isLockedToNeck
{
get { return m_IsLockedToNeck; }
set { m_IsLockedToNeck = value; }
}
/// Represents the neck's position relative to the user's head.
/// If isLockedToNeck is true, this will be the InputTracking position of the Head node modified
/// by an inverse neck model to approximate the neck position.
/// Otherwise, it is always zero.
public Vector3 neckPosition
{
get
{
return m_NeckPosition;
}
}
/// Represents the shoulder's position relative to the user's head.
/// This is not actually used as part of the arm model calculations, and exists for debugging.
public Vector3 shoulderPosition
{
get
{
Vector3 retVal = m_NeckPosition + m_TorsoRotation * Vector3.Scale(SHOULDER_POSITION, m_HandedMultiplier);
return retVal;
}
}
/// Represents the shoulder's rotation relative to the user's head.
/// This is not actually used as part of the arm model calculations, and exists for debugging.
public Quaternion shoulderRotation
{
get
{
return m_TorsoRotation;
}
}
/// Represents the elbow's position relative to the user's head.
public Vector3 elbowPosition
{
get
{
return m_ElbowPosition;
}
}
/// Represents the elbow's rotation relative to the user's head.
public Quaternion elbowRotation
{
get
{
return m_ElbowRotation;
}
}
/// Represents the wrist's position relative to the user's head.
public Vector3 wristPosition
{
get
{
return m_WristPosition;
}
}
/// Represents the wrist's rotation relative to the user's head.
public Quaternion wristRotation
{
get
{
return m_WristRotation;
}
}
/// Represents the controller's position relative to the user's head.
public Vector3 controllerPosition
{
get
{
return m_ControllerPosition;
}
}
/// Represents the controller's rotation relative to the user's head.
public Quaternion controllerRotation
{
get
{
return m_ControllerRotation;
}
}
#if UNITY_EDITOR
/// Editor only API to allow querying the torso forward direction
public Vector3 torsoDirection
{
get { return m_TorsoDirection; }
}
/// Editor only API to allow querying the torso rotation
public Quaternion torsoRotation
{
get { return m_TorsoRotation; }
}
#endif
protected Vector3 m_NeckPosition;
protected Vector3 m_ElbowPosition;
protected Quaternion m_ElbowRotation;
protected Vector3 m_WristPosition;
protected Quaternion m_WristRotation;
protected Vector3 m_ControllerPosition;
protected Quaternion m_ControllerRotation;
/// Multiplier for handedness such that 1 = right, 0 = center, -1 = left.
protected Vector3 m_HandedMultiplier;
/// Forward direction of user's torso.
protected Vector3 m_TorsoDirection;
/// Orientation of the user's torso.
protected Quaternion m_TorsoRotation;
// Default values for tuning variables.
protected static readonly Vector3 DEFAULT_ELBOW_REST_POSITION = new Vector3(0.195f, -0.5f, 0.005f);
protected static readonly Vector3 DEFAULT_WRIST_REST_POSITION = new Vector3(0.0f, 0.0f, 0.25f);
protected static readonly Vector3 DEFAULT_CONTROLLER_REST_POSITION = new Vector3(0.0f, 0.0f, 0.05f);
protected static readonly Vector3 DEFAULT_ARM_EXTENSION_OFFSET = new Vector3(-0.13f, 0.14f, 0.08f);
protected const float DEFAULT_ELBOW_BEND_RATIO = 0.6f;
/// Increases elbow bending as the controller moves up (unitless).
protected const float EXTENSION_WEIGHT = 0.4f;
/// Rest position for shoulder joint.
protected static readonly Vector3 SHOULDER_POSITION = new Vector3(0.17f, -0.2f, -0.03f);
/// Neck offset used to apply the inverse neck model when locked to the head.
protected static readonly Vector3 NECK_OFFSET = new Vector3(0.0f, 0.075f, 0.08f);
/// Angle range (degrees) over which the arm extension offset is blended in, from start to end.
protected const float MIN_EXTENSION_ANGLE = 7.0f;
protected const float MAX_EXTENSION_ANGLE = 60.0f;
protected virtual void OnEnable()
{
// Force the torso direction to match the gaze direction immediately.
// Otherwise, the controller will not be positioned correctly if the ArmModel was enabled
// when the user wasn't facing forward.
UpdateTorsoDirection(true);
// Update immediately to avoid a frame delay before the arm model is applied.
OnControllerInputUpdated();
}
protected virtual void OnDisable()
{
}
public virtual bool OnControllerInputUpdated()
{
UpdateHandedness();
if (UpdateTorsoDirection(false))
{
if (UpdateNeckPosition())
{
if (ApplyArmModel())
{
return true;
}
}
}
return false;
}
protected virtual void UpdateHandedness()
{
// Determine handedness multiplier.
m_HandedMultiplier.Set(0, 1, 1);
if (m_PoseSource == XRNode.RightHand || m_PoseSource == XRNode.TrackingReference)
{
m_HandedMultiplier.x = 1.0f;
}
else if (m_PoseSource == XRNode.LeftHand)
{
m_HandedMultiplier.x = -1.0f;
}
}
protected virtual bool UpdateTorsoDirection(bool forceImmediate)
{
// Determine the gaze direction horizontally.
Vector3 gazeDirection = new Vector3();
if (TryGetForwardVector(m_HeadPoseSource, out gazeDirection))
{
gazeDirection.y = 0.0f;
gazeDirection.Normalize();
// Use the gaze direction to update the forward direction.
if (forceImmediate)
{
m_TorsoDirection = gazeDirection;
}
else
{
Vector3 angAccel;
if (TryGetAngularAcceleration(poseSource, out angAccel))
{
float angularVelocity = angAccel.magnitude;
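// Use the magnitude of the controller's angular acceleration as a measure of motion:
// below the 0.2 threshold the torso direction holds still; above it, the direction
// slerps toward the gaze by at most 0.1 per update.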
float gazeFilterStrength = Mathf.Clamp((angularVelocity - 0.2f) / 45.0f, 0.0f, 0.1f);
m_TorsoDirection = Vector3.Slerp(m_TorsoDirection, gazeDirection, gazeFilterStrength);
}
}
// Calculate the torso rotation.
m_TorsoRotation = Quaternion.FromToRotation(Vector3.forward, m_TorsoDirection);
return true;
}
return false;
}
protected virtual bool UpdateNeckPosition()
{
if (m_IsLockedToNeck && TryGetPosition(m_HeadPoseSource, out m_NeckPosition))
{
// Find the approximate neck position by applying an inverse neck model.
// This transforms the head position to the center of the head and also accounts
// for the head's rotation so that the motion feels more natural.
return ApplyInverseNeckModel(m_NeckPosition, out m_NeckPosition);
}
else
{
m_NeckPosition = Vector3.zero;
return true;
}
}
protected virtual bool ApplyArmModel()
{
// Set the starting positions of the joints before they are transformed by the arm model.
SetUntransformedJointPositions();
// Get the controller's orientation.
Quaternion controllerOrientation;
Quaternion xyRotation;
float xAngle;
if (GetControllerRotation(out controllerOrientation, out xyRotation, out xAngle))
{
// Offset the elbow by the extension offset.
float extensionRatio = CalculateExtensionRatio(xAngle);
ApplyExtensionOffset(extensionRatio);
// Calculate the lerp rotation, which is used to control how much the rotation of the
// controller impacts each joint.
Quaternion lerpRotation = CalculateLerpRotation(xyRotation, extensionRatio);
CalculateFinalJointRotations(controllerOrientation, xyRotation, lerpRotation);
ApplyRotationToJoints();
m_FinalPose.position = m_ControllerPosition;
m_FinalPose.rotation = m_ControllerRotation;
return true;
}
return false;
}
/// Set the starting positions of the joints before they are transformed by the arm model.
protected virtual void SetUntransformedJointPositions()
{
m_ElbowPosition = Vector3.Scale(m_ElbowRestPosition, m_HandedMultiplier);
m_WristPosition = Vector3.Scale(m_WristRestPosition, m_HandedMultiplier);
m_ControllerPosition = Vector3.Scale(m_ControllerRestPosition, m_HandedMultiplier);
}
/// Calculate the extension ratio based on the angle of the controller along the x axis.
protected virtual float CalculateExtensionRatio(float xAngle)
{
float normalizedAngle = (xAngle - MIN_EXTENSION_ANGLE) / (MAX_EXTENSION_ANGLE - MIN_EXTENSION_ANGLE);
float extensionRatio = Mathf.Clamp(normalizedAngle, 0.0f, 1.0f);
return extensionRatio;
}
/// Offset the elbow by the extension offset.
protected virtual void ApplyExtensionOffset(float extensionRatio)
{
Vector3 extensionOffset = Vector3.Scale(m_ArmExtensionOffset, m_HandedMultiplier);
m_ElbowPosition += extensionOffset * extensionRatio;
}
/// Calculate the lerp rotation, which is used to control how much the rotation of the
/// controller impacts each joint.
protected virtual Quaternion CalculateLerpRotation(Quaternion xyRotation, float extensionRatio)
{
float totalAngle = Quaternion.Angle(xyRotation, Quaternion.identity);
float lerpSuppression = 1.0f - Mathf.Pow(totalAngle / 180.0f, 6.0f);
float inverseElbowBendRatio = 1.0f - m_ElbowBendRatio;
float lerpValue = inverseElbowBendRatio + m_ElbowBendRatio * extensionRatio * EXTENSION_WEIGHT;
lerpValue *= lerpSuppression;
return Quaternion.Lerp(Quaternion.identity, xyRotation, lerpValue);
}
/// Determine the final joint rotations relative to the head.
protected virtual void CalculateFinalJointRotations(Quaternion controllerOrientation, Quaternion xyRotation, Quaternion lerpRotation)
{
m_ElbowRotation = m_TorsoRotation * Quaternion.Inverse(lerpRotation) * xyRotation;
m_WristRotation = m_ElbowRotation * lerpRotation;
m_ControllerRotation = m_TorsoRotation * controllerOrientation;
}
/// Apply the joint rotations to the positions of the joints to determine the final pose.
protected virtual void ApplyRotationToJoints()
{
m_ElbowPosition = m_NeckPosition + m_TorsoRotation * m_ElbowPosition;
m_WristPosition = m_ElbowPosition + m_ElbowRotation * m_WristPosition;
m_ControllerPosition = m_WristPosition + m_WristRotation * m_ControllerPosition;
}
/// Transform the head position into an approximate neck position.
protected virtual bool ApplyInverseNeckModel(Vector3 headPosition, out Vector3 calculatedPosition)
{
// Determine the gaze direction horizontally.
Quaternion headRotation = new Quaternion();
if (TryGetRotation(m_HeadPoseSource, out headRotation))
{
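// Rotate the fixed neck offset into head space, then remove its constant vertical
// component, so head rotation moves the estimated neck without changing its resting height.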
Vector3 rotatedNeckOffset =
headRotation * NECK_OFFSET - NECK_OFFSET.y * Vector3.up;
headPosition -= rotatedNeckOffset;
calculatedPosition = headPosition;
return true;
}
calculatedPosition = Vector3.zero;
return false;
}
protected bool TryGetForwardVector(XRNode node, out Vector3 forward)
{
Pose tmpPose = new Pose();
if (TryGetRotation(node, out tmpPose.rotation) &&
TryGetPosition(node, out tmpPose.position))
{
forward = tmpPose.forward;
return true;
}
forward = Vector3.zero;
return false;
}
List<XR.XRNodeState> xrNodeStateListOrientation = new List<XRNodeState>();
protected bool TryGetRotation(XRNode node, out Quaternion rotation)
{
XR.InputTracking.GetNodeStates(xrNodeStateListOrientation);
var length = xrNodeStateListOrientation.Count;
XRNodeState nodeState;
for (int i = 0; i < length; ++i)
{
nodeState = xrNodeStateListOrientation[i];
if (nodeState.nodeType == node)
{
if (nodeState.TryGetRotation(out rotation))
{
return true;
}
}
}
rotation = Quaternion.identity;
return false;
}
List<XR.XRNodeState> xrNodeStateListPosition = new List<XRNodeState>();
protected bool TryGetPosition(XRNode node, out Vector3 position)
{
XR.InputTracking.GetNodeStates(xrNodeStateListPosition);
var length = xrNodeStateListPosition.Count;
XRNodeState nodeState;
for (int i = 0; i < length; ++i)
{
nodeState = xrNodeStateListPosition[i];
if (nodeState.nodeType == node)
{
if (nodeState.TryGetPosition(out position))
{
return true;
}
}
}
position = Vector3.zero;
return false;
}
List<XR.XRNodeState> xrNodeStateListAngularAcceleration = new List<XRNodeState>();
protected bool TryGetAngularAcceleration(XRNode node, out Vector3 angularAccel)
{
XR.InputTracking.GetNodeStates(xrNodeStateListAngularAcceleration);
var length = xrNodeStateListAngularAcceleration.Count;
XRNodeState nodeState;
for (int i = 0; i < length; ++i)
{
nodeState = xrNodeStateListAngularAcceleration[i];
if (nodeState.nodeType == node)
{
if (nodeState.TryGetAngularAcceleration(out angularAccel))
{
return true;
}
}
}
angularAccel = Vector3.zero;
return false;
}
List<XR.XRNodeState> xrNodeStateListAngularVelocity = new List<XRNodeState>();
protected bool TryGetAngularVelocity(XRNode node, out Vector3 angVel)
{
XR.InputTracking.GetNodeStates(xrNodeStateListAngularVelocity);
var length = xrNodeStateListAngularVelocity.Count;
XRNodeState nodeState;
for (int i = 0; i < length; ++i)
{
nodeState = xrNodeStateListAngularVelocity[i];
if (nodeState.nodeType == node)
{
if (nodeState.TryGetAngularVelocity(out angVel))
{
return true;
}
}
}
angVel = Vector3.zero;
return false;
}
/// Get the controller's orientation.
protected bool GetControllerRotation(out Quaternion rotation, out Quaternion xyRotation, out float xAngle)
{
// Find the controller's orientation relative to the player.
if (TryGetRotation(poseSource, out rotation))
{
rotation = Quaternion.Inverse(m_TorsoRotation) * rotation;
// Extract just the x rotation angle.
Vector3 controllerForward = rotation * Vector3.forward;
xAngle = 90.0f - Vector3.Angle(controllerForward, Vector3.up);
// Remove the z rotation from the controller.
xyRotation = Quaternion.FromToRotation(Vector3.forward, controllerForward);
return true;
}
else
{
rotation = Quaternion.identity;
xyRotation = Quaternion.identity;
xAngle = 0.0f;
return false;
}
}
#if UNITY_EDITOR
/// <summary>
/// Editor only API to draw debug gizmos to help visualize the arm model
/// </summary>
public virtual void OnDrawGizmos()
{
if (!enabled)
{
return;
}
if (transform.parent == null)
{
return;
}
Vector3 worldShoulder = transform.parent.TransformPoint(shoulderPosition);
Vector3 worldElbow = transform.parent.TransformPoint(elbowPosition);
Vector3 worldWrist = transform.parent.TransformPoint(wristPosition);
Vector3 worldController = transform.parent.TransformPoint(controllerPosition);
Gizmos.color = Color.red;
Gizmos.DrawSphere(worldShoulder, 0.02f);
Gizmos.DrawLine(worldShoulder, worldElbow);
Gizmos.color = Color.green;
Gizmos.DrawSphere(worldElbow, 0.02f);
Gizmos.DrawLine(worldElbow, worldWrist);
Gizmos.color = Color.cyan;
Gizmos.DrawSphere(worldWrist, 0.02f);
Gizmos.color = Color.blue;
Gizmos.DrawSphere(worldController, 0.02f);
}
#endif // UNITY_EDITOR
}
}
#endif
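For orientation, a minimal usage sketch (not part of this commit; the scene wiring and class name are assumptions). ArmModel derives from BasePoseProvider, so it can be assigned to a TrackedPoseDriver's poseProviderComponent to drive a controller Transform from the simulated arm pose.

// Hypothetical setup script, assumed to be attached to the controller GameObject.
using UnityEngine;
using UnityEngine.SpatialTracking;
using UnityEngine.XR;
using UnityEngine.XR.LegacyInputHelpers;

public class ArmModelSetupExample : MonoBehaviour
{
    void Awake()
    {
        var armModel = gameObject.AddComponent<ArmModel>();
        armModel.poseSource = XRNode.RightHand;      // 3DOF controller input
        armModel.headGameObject = XRNode.CenterEye;  // source of the head pose

        var driver = gameObject.AddComponent<TrackedPoseDriver>();
        driver.SetPoseSource(TrackedPoseDriver.DeviceType.GenericXRController,
                             TrackedPoseDriver.TrackedPose.RightPose);
        // With a pose provider assigned, the driver reads the arm model's
        // final pose instead of the raw device pose.
        driver.poseProviderComponent = armModel;
    }
}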

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 1ed4e84183ad15c43b32a13aeca25b98
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,167 @@
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Modified by Unity from originals located
// https://github.com/googlevr/daydream-elements/blob/master/Assets/DaydreamElements/Elements/ArmModels/Scripts/ArmModels/SwingArmModel.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
#if ENABLE_VR || ENABLE_AR
using UnityEngine.Experimental.XR.Interaction;
namespace UnityEngine.XR.LegacyInputHelpers
{
public class SwingArmModel : ArmModel
{
[Tooltip("Portion of controller rotation applied to the shoulder joint.")]
[SerializeField]
[Range(0.0f, 1.0f)]
float m_ShoulderRotationRatio = 0.5f;
/// <summary>
/// Portion of controller rotation applied to the shoulder joint.
/// </summary>
public float shoulderRotationRatio
{
get { return m_ShoulderRotationRatio; }
set { m_ShoulderRotationRatio = value; }
}
[Tooltip("Portion of controller rotation applied to the elbow joint.")]
[Range(0.0f, 1.0f)]
[SerializeField]
float m_ElbowRotationRatio = 0.3f;
/// <summary>
/// Portion of controller rotation applied to the elbow joint.
/// </summary>
public float elbowRotationRatio
{
get { return m_ElbowRotationRatio; }
set { m_ElbowRotationRatio = value; }
}
[Tooltip("Portion of controller rotation applied to the wrist joint.")]
[Range(0.0f, 1.0f)]
[SerializeField]
float m_WristRotationRatio = 0.2f;
/// <summary>
/// Portion of controller rotation applied to the wrist joint.
/// </summary>
public float wristRotationRatio
{
get { return m_WristRotationRatio; }
set { m_WristRotationRatio = value; }
}
[SerializeField]
Vector2 m_JointShiftAngle = new Vector2(160.0f, 180.0f);
/// <summary>
/// Min angle of the controller before starting to lerp towards the shifted joint ratios.
/// </summary>
public float minJointShiftAngle
{
get { return m_JointShiftAngle.x; }
set { m_JointShiftAngle.x = value; }
}
/// <summary>
/// Max angle of the controller before starting to lerp towards the shifted joint ratios.
/// </summary>
public float maxJointShiftAngle
{
get { return m_JointShiftAngle.y; }
set { m_JointShiftAngle.y = value; }
}
[Tooltip("Exponent applied to the joint shift ratio to control the curve of the shift.")]
[Range(1.0f, 20.0f)]
[SerializeField]
float m_JointShiftExponent = 6.0f;
/// <summary>
/// Exponent applied to the joint shift ratio to control the curve of the shift.
/// </summary>
public float jointShiftExponent
{
get { return m_JointShiftExponent; }
set { m_JointShiftExponent = value; }
}
[Tooltip("Portion of controller rotation applied to the shoulder joint when the controller is backwards.")]
[Range(0.0f, 1.0f)]
[SerializeField]
float m_ShiftedShoulderRotationRatio = 0.1f;
/// <summary>
/// Portion of controller rotation applied to the shoulder joint when the controller is backwards.
/// </summary>
public float shiftedShoulderRotationRatio
{
get { return m_ShiftedShoulderRotationRatio; }
set { m_ShiftedShoulderRotationRatio = value; }
}
[Tooltip("Portion of controller rotation applied to the elbow joint when the controller is backwards.")]
[Range(0.0f, 1.0f)]
[SerializeField]
float m_ShiftedElbowRotationRatio = 0.4f;
/// <summary>
/// Portion of controller rotation applied to the elbow joint when the controller is backwards.
/// </summary>
public float shiftedElbowRotationRatio
{
get { return m_ShiftedElbowRotationRatio; }
set { m_ShiftedElbowRotationRatio = value; }
}
[Tooltip("Portion of controller rotation applied to the wrist joint when the controller is backwards.")]
[Range(0.0f, 1.0f)]
[SerializeField]
float m_ShiftedWristRotationRatio = 0.5f;
/// <summary>
/// Portion of controller rotation applied to the wrist joint when the controller is backwards.
/// </summary>
public float shiftedWristRotationRatio
{
get { return m_ShiftedWristRotationRatio; }
set { m_ShiftedWristRotationRatio = value; }
}
protected override void CalculateFinalJointRotations(Quaternion controllerOrientation, Quaternion xyRotation, Quaternion lerpRotation)
{
// As the controller angle increases the ratio of the rotation applied to each joint shifts.
float totalAngle = Quaternion.Angle(xyRotation, Quaternion.identity);
float jointShiftAngleRange = maxJointShiftAngle - minJointShiftAngle;
float angleRatio = Mathf.Clamp01((totalAngle - minJointShiftAngle) / jointShiftAngleRange);
float jointShiftRatio = Mathf.Pow(angleRatio, m_JointShiftExponent);
// Calculate what portion of the rotation is applied to each joint.
float finalShoulderRatio = Mathf.Lerp(m_ShoulderRotationRatio, m_ShiftedShoulderRotationRatio, jointShiftRatio);
float finalElbowRatio = Mathf.Lerp(m_ElbowRotationRatio, m_ShiftedElbowRotationRatio, jointShiftRatio);
float finalWristRatio = Mathf.Lerp(m_WristRotationRatio, m_ShiftedWristRotationRatio, jointShiftRatio);
// Calculate relative rotations for each joint.
Quaternion swingShoulderRot = Quaternion.Lerp(Quaternion.identity, xyRotation, finalShoulderRatio);
Quaternion swingElbowRot = Quaternion.Lerp(Quaternion.identity, xyRotation, finalElbowRatio);
Quaternion swingWristRot = Quaternion.Lerp(Quaternion.identity, xyRotation, finalWristRatio);
// Calculate final rotations.
Quaternion shoulderRotation = m_TorsoRotation * swingShoulderRot;
m_ElbowRotation = shoulderRotation * swingElbowRot;
m_WristRotation = elbowRotation * swingWristRot;
m_ControllerRotation = m_TorsoRotation * controllerOrientation;
m_TorsoRotation = shoulderRotation;
}
}
}
#endif
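A brief tuning sketch (not part of this commit; the script is hypothetical and the values mirror the serialized defaults above). The three ratios split the controller's rotation across the joints, and the "shifted" set takes over as the controller swings past the joint shift angles.

// Hypothetical configuration script for the swing arm model.
using UnityEngine;
using UnityEngine.XR.LegacyInputHelpers;

public class SwingArmTuningExample : MonoBehaviour
{
    void Awake()
    {
        var swing = gameObject.AddComponent<SwingArmModel>();
        swing.shoulderRotationRatio = 0.5f; // most rotation at the shoulder...
        swing.elbowRotationRatio = 0.3f;
        swing.wristRotationRatio = 0.2f;    // ...tapering off toward the wrist
        swing.minJointShiftAngle = 160.0f;  // begin shifting when pointed backwards
        swing.maxJointShiftAngle = 180.0f;
    }
}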

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2950d57dafc0eed449fa54e88bc8146c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,258 @@
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Modified by Unity from original:
// https://github.com/googlevr/daydream-elements/blob/master/Assets/DaydreamElements/Elements/ArmModels/Scripts/ArmModels/TransitionArmModel.cs
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using UnityEngine;
using UnityEngine.Events;
#if ENABLE_VR || ENABLE_AR
using UnityEngine.Experimental.XR.Interaction;
using UnityEngine.SpatialTracking;
[assembly: InternalsVisibleTo("UnityEditor.XR.LegacyInputHelpers")]
namespace UnityEngine.XR.LegacyInputHelpers
{
[Serializable]
public class ArmModelTransition
{
[SerializeField]
String m_KeyName;
/// <summary>
/// The string name that will be used to trigger a transition.
/// </summary>
public string transitionKeyName
{
get { return m_KeyName; }
set { m_KeyName = value; }
}
[SerializeField]
ArmModel m_ArmModel;
/// <summary>
/// The arm model that will be transitioned to on receiving this event.
/// </summary>
public ArmModel armModel
{
get { return m_ArmModel; }
set { m_ArmModel = value; }
}
}
public class TransitionArmModel : ArmModel
{
[SerializeField]
ArmModel m_CurrentArmModelComponent = null;
/// <summary>
/// This field contains the currently active arm model that will be used as the input to the tracked pose driver which is
/// using the transitional arm model.
/// </summary>
public ArmModel currentArmModelComponent
{
get { return m_CurrentArmModelComponent; }
set { m_CurrentArmModelComponent = value; }
}
[SerializeField]
public List<ArmModelTransition> m_ArmModelTransitions = new List<ArmModelTransition>();
/// Max number of active transitions that can be going on at one time.
/// Transitions are only completed when the controller rotates, so if TransitionToArmModel
/// is called several times without the controller moving, the number of active transitions can
/// add up.
private const int MAX_ACTIVE_TRANSITIONS = 10;
/// When transitioning to a new arm model, drop any old transitions that have barely begun.
private const float DROP_TRANSITION_THRESHOLD = 0.035f;
/// Threshold for clamping transitions that have been completed.
private const float LERP_CLAMP_THRESHOLD = 0.95f;
/// Minimum amount of angular velocity on the controller before transitioning occurs.
private const float MIN_ANGULAR_VELOCITY = 0.2f;
/// Unitless weight for how much the angular velocity impacts the transition.
private const float ANGULAR_VELOCITY_DIVISOR = 45.0f;
internal struct ArmModelBlendData
{
public ArmModel armModel;
public float currentBlendAmount;
}
internal List<ArmModelBlendData> armModelBlendData = new List<ArmModelBlendData>(MAX_ACTIVE_TRANSITIONS);
ArmModelBlendData currentBlendingArmModel;
public bool Queue(string key)
{
// attempt to find the arm model to blend to using the supplied key.
foreach(var am in m_ArmModelTransitions)
{
if(am.transitionKeyName == key)
{
Queue(am.armModel);
return true;
}
}
return false;
}
public void Queue(ArmModel newArmModel)
{
if(newArmModel == null)
{
return;
}
if(m_CurrentArmModelComponent == null)
{
m_CurrentArmModelComponent = newArmModel;
}
RemoveJustStartingTransitions();
if (armModelBlendData.Count == MAX_ACTIVE_TRANSITIONS)
{
RemoveOldestTransition();
}
var ambd = new ArmModelBlendData();
ambd.armModel = newArmModel;
ambd.currentBlendAmount = 0.0f;
armModelBlendData.Add(ambd);
}
void RemoveJustStartingTransitions()
{
// Iterate backwards so that RemoveAt does not skip the element that
// follows a removed entry.
for (int i = armModelBlendData.Count - 1; i >= 0; --i)
{
if (armModelBlendData[i].currentBlendAmount < DROP_TRANSITION_THRESHOLD)
{
armModelBlendData.RemoveAt(i);
}
}
}
void RemoveOldestTransition()
{
armModelBlendData.RemoveAt(0);
}
public override PoseDataFlags GetPoseFromProvider(out Pose output)
{
if (UpdateBlends())
{
output = finalPose;
return PoseDataFlags.Position | PoseDataFlags.Rotation;
}
output = Pose.identity;
return PoseDataFlags.NoData;
}
bool UpdateBlends()
{
if (currentArmModelComponent == null)
{
return false;
}
if (m_CurrentArmModelComponent.OnControllerInputUpdated())
{
m_NeckPosition = m_CurrentArmModelComponent.neckPosition;
m_ElbowPosition = m_CurrentArmModelComponent.elbowPosition;
m_WristPosition = m_CurrentArmModelComponent.wristPosition;
m_ControllerPosition = m_CurrentArmModelComponent.controllerPosition;
m_ElbowRotation = m_CurrentArmModelComponent.elbowRotation;
m_WristRotation = m_CurrentArmModelComponent.wristRotation;
m_ControllerRotation = m_CurrentArmModelComponent.controllerRotation;
#if UNITY_EDITOR
m_TorsoDirection = m_CurrentArmModelComponent.torsoDirection;
m_TorsoRotation = m_CurrentArmModelComponent.torsoRotation;
#endif
Vector3 angVel;
if (TryGetAngularVelocity(poseSource, out angVel) && armModelBlendData.Count > 0)
{
float angularVelocity = angVel.magnitude;
float lerpValue = Mathf.Clamp((angularVelocity - MIN_ANGULAR_VELOCITY) / ANGULAR_VELOCITY_DIVISOR, 0.0f, 0.1f);
for (int i = 0; i < armModelBlendData.Count; ++i)
{
ArmModelBlendData ambd = armModelBlendData[i];
ambd.currentBlendAmount = Mathf.Lerp(ambd.currentBlendAmount, 1.0f, lerpValue);
if (ambd.currentBlendAmount > LERP_CLAMP_THRESHOLD)
{
ambd.currentBlendAmount = 1.0f;
}
else
{
ambd.armModel.OnControllerInputUpdated();
m_NeckPosition = Vector3.Slerp(neckPosition, ambd.armModel.neckPosition, ambd.currentBlendAmount);
m_ElbowPosition = Vector3.Slerp(elbowPosition, ambd.armModel.elbowPosition, ambd.currentBlendAmount);
m_WristPosition = Vector3.Slerp(wristPosition, ambd.armModel.wristPosition, ambd.currentBlendAmount);
m_ControllerPosition = Vector3.Slerp(controllerPosition, ambd.armModel.controllerPosition, ambd.currentBlendAmount);
m_ElbowRotation = Quaternion.Slerp(elbowRotation, ambd.armModel.elbowRotation, ambd.currentBlendAmount);
m_WristRotation = Quaternion.Slerp(wristRotation, ambd.armModel.wristRotation, ambd.currentBlendAmount);
m_ControllerRotation = Quaternion.Slerp(controllerRotation, ambd.armModel.controllerRotation, ambd.currentBlendAmount);
}
// write back.
armModelBlendData[i] = ambd;
if (ambd.currentBlendAmount >= 1.0f)
{
m_CurrentArmModelComponent = ambd.armModel;
armModelBlendData.RemoveRange(0, i + 1);
}
}
}
else if (armModelBlendData.Count > 0)
{
Debug.LogErrorFormat(this.gameObject, "Unable to get angular velocity for node");
return false;
}
finalPose = new Pose(controllerPosition, controllerRotation);
return true;
}
else
{
return false;
}
}
#if UNITY_EDITOR
internal List<ArmModelBlendData> GetActiveBlends()
{
return armModelBlendData;
}
#endif
}
}
#endif
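A short usage sketch (not part of this commit; the key name and class are assumptions). Transitions are registered as key/arm-model pairs in m_ArmModelTransitions, then triggered by key; the blend only advances while the controller rotates.

// Hypothetical switcher script; "aim" must match a configured transitionKeyName.
using UnityEngine;
using UnityEngine.XR.LegacyInputHelpers;

public class ArmModelSwitcherExample : MonoBehaviour
{
    public TransitionArmModel transitionArmModel;

    public void OnAimPressed()
    {
        // Queue returns false when no transition is registered for the key.
        if (!transitionArmModel.Queue("aim"))
            Debug.LogWarning("No arm model registered for key 'aim'");
    }
}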

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 93492893b74ab764b83d940916a59b03
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,320 @@
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
#if ENABLE_VR || ENABLE_AR
using UnityEngine.XR;
namespace UnityEditor.XR.LegacyInputHelpers
{
public enum UserRequestedTrackingMode
{
Default,
Device,
Floor,
}
[AddComponentMenu("XR/Camera Offset")]
public class CameraOffset : MonoBehaviour
{
const float k_DefaultCameraYOffset = 1.36144f;
[SerializeField]
[Tooltip("GameObject to move to desired height off the floor (defaults to this object if none provided).")]
GameObject m_CameraFloorOffsetObject = null;
/// <summary>Gets or sets the GameObject to move to desired height off the floor (defaults to this object if none provided).</summary>
public GameObject cameraFloorOffsetObject { get { return m_CameraFloorOffsetObject; } set { m_CameraFloorOffsetObject = value; UpdateTrackingOrigin(m_TrackingOriginMode); } }
[SerializeField]
[Tooltip("What the user wants the tracking origin mode to be")]
UserRequestedTrackingMode m_RequestedTrackingMode = UserRequestedTrackingMode.Default;
public UserRequestedTrackingMode requestedTrackingMode { get { return m_RequestedTrackingMode; } set { m_RequestedTrackingMode = value; TryInitializeCamera(); } }
#if UNITY_2019_3_OR_NEWER
[SerializeField]
[Tooltip("Sets the type of tracking origin to use for this Rig. Tracking origins identify where 0,0,0 is in the world of tracking.")]
TrackingOriginModeFlags m_TrackingOriginMode = TrackingOriginModeFlags.Unknown;
/// <summary>Gets or sets the type of tracking origin to use for this Rig. Tracking origins identify where 0,0,0 is in the world of tracking. Not all devices support all tracking spaces; if the selected tracking space is not set it will fall back to Stationary.</summary>
public TrackingOriginModeFlags TrackingOriginMode { get { return m_TrackingOriginMode; } set { m_TrackingOriginMode = value; TryInitializeCamera(); } }
#endif
// Disable Obsolete warnings for TrackingSpaceType, explicitly to read in old data and upgrade.
#pragma warning disable 0618
[SerializeField]
[Tooltip("Set if the XR experience is Room Scale or Stationary.")]
TrackingSpaceType m_TrackingSpace = TrackingSpaceType.Stationary;
/// <summary>Gets or sets whether the experience is room scale or stationary. Not all devices support all tracking spaces; if the selected tracking space is not set it will fall back to Stationary.</summary>
#if UNITY_2019_3_OR_NEWER
[Obsolete("CameraOffset.trackingSpace is obsolete. Please use CameraOffset.trackingOriginMode.")]
#endif
public TrackingSpaceType trackingSpace { get { return m_TrackingSpace; } set { m_TrackingSpace = value; TryInitializeCamera(); } }
#pragma warning restore 0618
[SerializeField]
[Tooltip("Camera Height to be used when in Device tracking space.")]
float m_CameraYOffset = k_DefaultCameraYOffset;
/// <summary>Gets or sets the amount the camera is offset from the floor (by moving the camera offset object).</summary>
public float cameraYOffset { get { return m_CameraYOffset; } set { m_CameraYOffset = value; UpdateTrackingOrigin(m_TrackingOriginMode); } }
// Bookkeeping to track lazy initialization of the tracking space type.
bool m_CameraInitialized = false;
bool m_CameraInitializing = false;
#if UNITY_2019_3_OR_NEWER
/// <summary>
/// Used to cache the input subsystems without creating additional garbage.
/// </summary>
static List<XRInputSubsystem> s_InputSubsystems = new List<XRInputSubsystem>();
#endif
/// Utility helper to migrate from TrackingSpace to TrackingOrigin seamlessly
void UpgradeTrackingSpaceToTrackingOriginMode()
{
#if UNITY_2019_3_OR_NEWER
// Disable Obsolete warnings for TrackingSpaceType, explicitly to allow a proper upgrade path.
#pragma warning disable 0618
if (m_TrackingOriginMode == TrackingOriginModeFlags.Unknown && m_TrackingSpace <= TrackingSpaceType.RoomScale)
{
switch (m_TrackingSpace)
{
case TrackingSpaceType.RoomScale:
{
m_TrackingOriginMode = TrackingOriginModeFlags.Floor;
break;
}
case TrackingSpaceType.Stationary:
{
m_TrackingOriginMode = TrackingOriginModeFlags.Device;
break;
}
default:
break;
}
// Tag the old value as invalid so it is not used again.
m_TrackingSpace = (TrackingSpaceType)3;
#if UNITY_EDITOR
EditorUtility.SetDirty(this);
#endif //UNITY_EDITOR
#pragma warning restore 0618
}
#endif //UNITY_2019_3_OR_NEWER
}
void Awake()
{
if (!m_CameraFloorOffsetObject)
{
Debug.LogWarning("No camera container specified for XR Rig, using attached GameObject");
m_CameraFloorOffsetObject = this.gameObject;
}
}
void Start()
{
TryInitializeCamera();
}
void OnValidate()
{
UpgradeTrackingSpaceToTrackingOriginMode();
TryInitializeCamera();
}
void TryInitializeCamera()
{
m_CameraInitialized = SetupCamera();
if (!m_CameraInitialized && !m_CameraInitializing)
StartCoroutine(RepeatInitializeCamera());
}
/// <summary>
/// Repeatedly attempt to initialize the camera.
/// </summary>
/// <returns>An enumerator for the coroutine that retries initialization once per frame.</returns>
IEnumerator RepeatInitializeCamera()
{
m_CameraInitializing = true;
yield return null;
while (!m_CameraInitialized)
{
m_CameraInitialized = SetupCamera();
yield return null;
}
m_CameraInitializing = false;
}
/// <summary>
/// Handles re-centering and offsetting the camera in space depending on which tracking space it is set up in.
/// </summary>
#if UNITY_2019_3_OR_NEWER
bool SetupCamera()
{
SubsystemManager.GetInstances<XRInputSubsystem>(s_InputSubsystems);
bool initialized = true;
if (s_InputSubsystems.Count != 0)
{
for (int i = 0; i < s_InputSubsystems.Count; i++)
{
var result = SetupCamera(s_InputSubsystems[i]);
// After the camera is successfully set up register the callback for
// handling tracking origin changes. It is possible this could happen more than
// once so unregister the callback first just in case.
if (result)
{
s_InputSubsystems[i].trackingOriginUpdated -= OnTrackingOriginUpdated;
s_InputSubsystems[i].trackingOriginUpdated += OnTrackingOriginUpdated;
}
initialized &= result;
}
}
else
{
// Disable Obsolete warnings for TrackingSpaceType, explicitly to allow a proper upgrade path.
#pragma warning disable 0618
if (m_RequestedTrackingMode == UserRequestedTrackingMode.Floor)
{
SetupCameraLegacy(TrackingSpaceType.RoomScale);
}
else
{
SetupCameraLegacy(TrackingSpaceType.Stationary);
}
#pragma warning restore 0618
}
return initialized;
}
bool SetupCamera(XRInputSubsystem subsystem)
{
if (subsystem == null)
return false;
bool trackingSettingsSet = false;
var currentMode = subsystem.GetTrackingOriginMode();
var supportedModes = subsystem.GetSupportedTrackingOriginModes();
TrackingOriginModeFlags requestedMode = TrackingOriginModeFlags.Unknown;
// map between the user requested options, and the actual options.
if (m_RequestedTrackingMode == UserRequestedTrackingMode.Default)
{
requestedMode = currentMode;
}
else if(m_RequestedTrackingMode == UserRequestedTrackingMode.Device)
{
requestedMode = TrackingOriginModeFlags.Device;
}
else if (m_RequestedTrackingMode == UserRequestedTrackingMode.Floor)
{
requestedMode = TrackingOriginModeFlags.Floor;
}
else
{
Debug.LogWarning("Unknown Requested Tracking Mode");
}
// Now that the requested mode has been mapped, apply it to the subsystem.
if (requestedMode == TrackingOriginModeFlags.Floor)
{
// We need to check for Unknown because we may not be in a state where we can read this data yet.
if ((supportedModes & (TrackingOriginModeFlags.Floor | TrackingOriginModeFlags.Unknown)) == 0)
Debug.LogWarning("CameraOffset.SetupCamera: Attempting to set the tracking space to Floor, but that is not supported by the SDK.");
else
trackingSettingsSet = subsystem.TrySetTrackingOriginMode(requestedMode);
}
else if (requestedMode == TrackingOriginModeFlags.Device)
{
// We need to check for Unknown because we may not be in a state where we can read this data yet.
if ((supportedModes & (TrackingOriginModeFlags.Device | TrackingOriginModeFlags.Unknown)) == 0)
Debug.LogWarning("CameraOffset.SetupCamera: Attempting to set the tracking space to Device, but that is not supported by the SDK.");
else
trackingSettingsSet = subsystem.TrySetTrackingOriginMode(requestedMode) && subsystem.TryRecenter();
}
if(trackingSettingsSet)
UpdateTrackingOrigin(subsystem.GetTrackingOriginMode());
return trackingSettingsSet;
}
private void UpdateTrackingOrigin(TrackingOriginModeFlags trackingOriginModeFlags)
{
m_TrackingOriginMode = trackingOriginModeFlags;
if (m_CameraFloorOffsetObject != null)
m_CameraFloorOffsetObject.transform.localPosition = new Vector3(
m_CameraFloorOffsetObject.transform.localPosition.x,
m_TrackingOriginMode == TrackingOriginModeFlags.Device ? cameraYOffset : 0.0f,
m_CameraFloorOffsetObject.transform.localPosition.z);
}
private void OnTrackingOriginUpdated(XRInputSubsystem subsystem) => UpdateTrackingOrigin(subsystem.GetTrackingOriginMode());
private void OnDestroy()
{
SubsystemManager.GetInstances(s_InputSubsystems);
foreach (var subsystem in s_InputSubsystems)
subsystem.trackingOriginUpdated -= OnTrackingOriginUpdated;
}
#else
bool SetupCamera()
{
if (m_RequestedTrackingMode == UserRequestedTrackingMode.Floor)
{
SetupCameraLegacy(TrackingSpaceType.RoomScale);
}
else if(m_RequestedTrackingMode == UserRequestedTrackingMode.Device)
{
SetupCameraLegacy(TrackingSpaceType.Stationary);
}
else if (m_RequestedTrackingMode == UserRequestedTrackingMode.Default)
{
TrackingSpaceType tst = XRDevice.GetTrackingSpaceType();
SetupCameraLegacy(tst);
}
else
{
Debug.LogWarning("CameraOffset.SetupCamera: Unknown requested ");
}
return true;
}
#endif
// Disable Obsolete warnings for TrackingSpaceType, explicitly to allow for using legacy data if available.
#pragma warning disable 0618
void SetupCameraLegacy(TrackingSpaceType trackingSpace)
{
float cameraYOffset = m_CameraYOffset;
XRDevice.SetTrackingSpaceType(trackingSpace);
if (trackingSpace == TrackingSpaceType.Stationary)
InputTracking.Recenter();
else if (trackingSpace == TrackingSpaceType.RoomScale)
cameraYOffset = 0;
m_TrackingSpace = trackingSpace;
// Move camera to correct height
if (m_CameraFloorOffsetObject)
m_CameraFloorOffsetObject.transform.localPosition = new Vector3(m_CameraFloorOffsetObject.transform.localPosition.x, cameraYOffset, m_CameraFloorOffsetObject.transform.localPosition.z);
}
#pragma warning restore 0618
}
}
#endif
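A minimal usage sketch (not part of this commit; the rig reference is an assumption). Setting requestedTrackingMode re-runs TryInitializeCamera, which retries each frame until an XRInputSubsystem accepts the requested origin mode. Note that CameraOffset is declared in the UnityEditor.XR.LegacyInputHelpers namespace above, despite being a runtime component.

// Hypothetical runtime script that requests floor-relative tracking.
using UnityEngine;
using UnityEditor.XR.LegacyInputHelpers;

public class RigSetupExample : MonoBehaviour
{
    public CameraOffset cameraOffset;

    void Start()
    {
        // The property setter triggers TryInitializeCamera internally.
        cameraOffset.requestedTrackingMode = UserRequestedTrackingMode.Floor;
    }
}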

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a2483b9bd782f9449a5972b61b7d51a9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,15 @@
{
"name": "UnityEngine.XR.LegacyInputHelpers",
"references": [
"UnityEngine.SpatialTracking",
"Windows.UI.Input.Spatial"
],
"optionalUnityReferences": [],
"includePlatforms": [],
"excludePlatforms": [],
"allowUnsafeCode": false,
"overrideReferences": false,
"precompiledReferences": [],
"autoReferenced": true,
"defineConstraints": []
}

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 28c5e819ece8a0746abb16dc5aa91f44
AssemblyDefinitionImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7c691f0ea17c03e47ac0c3d4c7125748
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,5 @@
using System.Runtime.CompilerServices;
using UnityEngine;
// ADD_NEW_PLATFORM_HERE
[assembly: InternalsVisibleTo("UnityEditor.XR.SpatialTracking")]

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4a77633e6daf24a45994d8aae1b52268
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,42 @@
using System;
using UnityEngine.SpatialTracking;
namespace UnityEngine.Experimental.XR.Interaction
{
/// <summary>
/// The BasePoseProvider type is used as the base interface for all "Pose Providers".
/// Implementing this abstract class will allow the Pose Provider to be linked to a Tracked Pose Driver.
/// </summary>
[Serializable]
public abstract class BasePoseProvider : MonoBehaviour
{
/// <summary>
/// Gets the Pose value from the Pose Provider.
/// Specializations will return the correct bitflags relating to the Pose data they are returning.
/// </summary>
/// <param name="output">When this method returns, contains the Pose data from the Pose Provider.</param>
/// <returns>Returns whether position and/or rotation was set on the Pose struct returned with <paramref name="output"/>.</returns>
public virtual PoseDataFlags GetPoseFromProvider(out Pose output)
{
// Disabling the obsolete warning/error here so that no error is generated by the use of this function.
#pragma warning disable 618,619
if (TryGetPoseFromProvider(out output))
{
return PoseDataFlags.Position | PoseDataFlags.Rotation;
}
#pragma warning restore 618,619
return PoseDataFlags.NoData;
}
/// <summary>
/// This function is provided for backwards compatibility with the BasePoseProvider found in com.unity.xr.legacyinputhelpers v1.3.X.
/// Please do not implement this function, instead use the new API via <see cref="GetPoseFromProvider"/>.
/// </summary>
[Obsolete("This function is provided for backwards compatibility with the BasePoseProvider found in com.unity.xr.legacyinputhelpers v1.3.X. Please do not implement this function, instead use the new API via GetPoseFromProvider", false)]
public virtual bool TryGetPoseFromProvider(out Pose output)
{
output = Pose.identity;
return false;
}
}
}
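A minimal sketch of a custom provider (not part of this commit; the class and offset value are assumptions). Overriding GetPoseFromProvider and returning the appropriate PoseDataFlags is all that is needed to plug into a TrackedPoseDriver.

// Hypothetical provider that reports a fixed pose.
using UnityEngine;
using UnityEngine.Experimental.XR.Interaction;
using UnityEngine.SpatialTracking;

public class FixedOffsetPoseProvider : BasePoseProvider
{
    public Vector3 offset = new Vector3(0.0f, 1.5f, 0.0f); // assumed test offset

    public override PoseDataFlags GetPoseFromProvider(out Pose output)
    {
        output = new Pose(offset, Quaternion.identity);
        return PoseDataFlags.Position | PoseDataFlags.Rotation;
    }
}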

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: aa96b4d255b4b9f4da4c77ce337393ec
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,581 @@
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using UnityEngine.Experimental.XR.Interaction;
#if ENABLE_AR || ENABLE_VR
using UnityEngine.XR;
#endif
[assembly: InternalsVisibleTo("UnityEditor.SpatialTracking")]
namespace UnityEngine.SpatialTracking
{
internal class TrackedPoseDriverDataDescription
{
internal struct PoseData
{
public List<string> PoseNames;
public List<TrackedPoseDriver.TrackedPose> Poses;
}
internal static List<PoseData> DeviceData = new List<PoseData>
{
// Generic XR Device
new PoseData
{
PoseNames = new List<string>
{
"Left Eye", "Right Eye", "Center Eye - HMD Reference", "Head", "Color Camera"
},
Poses = new List<TrackedPoseDriver.TrackedPose>
{
TrackedPoseDriver.TrackedPose.LeftEye,
TrackedPoseDriver.TrackedPose.RightEye,
TrackedPoseDriver.TrackedPose.Center,
TrackedPoseDriver.TrackedPose.Head,
TrackedPoseDriver.TrackedPose.ColorCamera
}
},
// generic controller
new PoseData
{
PoseNames = new List<string>
{
"Left Controller", "Right Controller"
},
Poses = new List<TrackedPoseDriver.TrackedPose>
{
TrackedPoseDriver.TrackedPose.LeftPose,
TrackedPoseDriver.TrackedPose.RightPose
}
},
// generic remote
new PoseData
{
PoseNames = new List<string>
{
"Device Pose"
},
Poses = new List<TrackedPoseDriver.TrackedPose>
{
TrackedPoseDriver.TrackedPose.RemotePose,
}
},
};
}
/// <summary>
/// Bitflag enum which represents what data was set on an associated Pose struct
/// </summary>
[Flags]
public enum PoseDataFlags
{
/// <summary>
/// No data was actually set on the pose
/// </summary>
NoData = 0,
/// <summary>
/// If this flag is set, position data was updated on the associated pose struct
/// </summary>
Position = 1 << 0,
/// <summary>
/// If this flag is set, rotation data was updated on the associated pose struct
/// </summary>
Rotation = 1 << 1,
}
/// <summary>
/// The PoseDataSource class acts as a container for the GetDataFromSource method call that should be used by PoseProviders wanting to query data for a particular pose.
/// </summary>
public static class PoseDataSource
{
#if ENABLE_AR || ENABLE_VR
internal static List<XR.XRNodeState> nodeStates = new List<XR.XRNodeState>();
internal static PoseDataFlags GetNodePoseData(XR.XRNode node, out Pose resultPose)
{
PoseDataFlags retData = PoseDataFlags.NoData;
XR.InputTracking.GetNodeStates(nodeStates);
foreach (XR.XRNodeState nodeState in nodeStates)
{
if (nodeState.nodeType == node)
{
if (nodeState.TryGetPosition(out resultPose.position))
{
retData |= PoseDataFlags.Position;
}
if (nodeState.TryGetRotation(out resultPose.rotation))
{
retData |= PoseDataFlags.Rotation;
}
return retData;
}
}
resultPose = Pose.identity;
return retData;
}
#endif
/// <summary>The TryGetDataFromSource method is used to query data from the XRNode subsystem based on the provided pose source.</summary>
/// <param name="poseSource">The pose source to request data for.</param>
/// <param name="resultPose">The resulting pose data.</param>
/// <returns>True, if the pose source is valid, otherwise false.</returns>
public static bool TryGetDataFromSource(TrackedPoseDriver.TrackedPose poseSource, out Pose resultPose)
{
return GetDataFromSource(poseSource, out resultPose) == (PoseDataFlags.Position | PoseDataFlags.Rotation);
}
/// <summary>The GetDataFromSource method is used to query data from the XRNode subsystem based on the provided pose source.</summary>
/// <param name="poseSource">The pose source to request data for.</param>
/// <param name="resultPose">The resulting pose data. This function will return the Center Eye pose if the Color Camera pose is not available.</param>
/// <returns>Returns a bitflag which represents which data has been retrieved from the provided pose source</returns>
public static PoseDataFlags GetDataFromSource(TrackedPoseDriver.TrackedPose poseSource, out Pose resultPose)
{
#if ENABLE_AR || ENABLE_VR
switch (poseSource)
{
case TrackedPoseDriver.TrackedPose.RemotePose:
{
PoseDataFlags retFlags = GetNodePoseData(XR.XRNode.RightHand, out resultPose);
if (retFlags == PoseDataFlags.NoData)
return GetNodePoseData(XR.XRNode.LeftHand, out resultPose);
return retFlags;
}
case TrackedPoseDriver.TrackedPose.LeftEye:
{
return GetNodePoseData(XR.XRNode.LeftEye, out resultPose);
}
case TrackedPoseDriver.TrackedPose.RightEye:
{
return GetNodePoseData(XR.XRNode.RightEye, out resultPose);
}
case TrackedPoseDriver.TrackedPose.Head:
{
return GetNodePoseData(XR.XRNode.Head, out resultPose);
}
case TrackedPoseDriver.TrackedPose.Center:
{
return GetNodePoseData(XR.XRNode.CenterEye, out resultPose);
}
case TrackedPoseDriver.TrackedPose.LeftPose:
{
return GetNodePoseData(XR.XRNode.LeftHand, out resultPose);
}
case TrackedPoseDriver.TrackedPose.RightPose:
{
return GetNodePoseData(XR.XRNode.RightHand, out resultPose);
}
case TrackedPoseDriver.TrackedPose.ColorCamera:
{
// We fall back to CenterEye because we can't currently extend the XRNode structure, nor are we ready to replace it.
return GetNodePoseData(XR.XRNode.CenterEye, out resultPose);
}
default:
{
Debug.LogWarningFormat("Unable to retrieve pose data for poseSource: {0}", poseSource.ToString());
break;
}
}
#endif
resultPose = Pose.identity;
return PoseDataFlags.NoData;
}
}
// The DefaultExecutionOrder is needed because TrackedPoseDriver does some
// of its work in regular Update and FixedUpdate calls, but this needs to
// be done before regular user scripts have their own Update and
// FixedUpdate calls, in order that they correctly get the values for this
// frame and not the previous.
// -32000 is the minimal possible execution order value; -30000 makes it
// unlikely users chose lower values for their scripts by accident, but
// still allows for the possibility.
/// <summary>
/// The TrackedPoseDriver component applies the current Pose value of a tracked device to the transform of the GameObject.
/// TrackedPoseDriver can track multiple types of devices including XR HMDs, controllers, and remotes.
/// </summary>
[DefaultExecutionOrder(-30000)]
[Serializable]
[AddComponentMenu("XR/Tracked Pose Driver")]
[HelpURL("https://docs.unity3d.com/Packages/com.unity.xr.legacyinputhelpers@2.1/manual/index.html")]
public class TrackedPoseDriver : MonoBehaviour
{
/// <summary>
/// The device being tracked by the tracked pose driver
/// </summary>
public enum DeviceType
{
/// <summary>
/// A generic XR device, use this value for HMD and AR mobile device tracking
/// </summary>
GenericXRDevice = 0,
/// <summary>
/// An XR controller, use this value for controllers
/// </summary>
GenericXRController = 1,
/// <summary>
/// An XR remote, use this value for mobile remotes
/// </summary>
GenericXRRemote = 2
}
/// <summary>
/// The list of endpoints that users can track with the <see cref="TrackedPoseDriver"/>
/// </summary>
public enum TrackedPose
{
/// <summary>
/// The left eye of an HMD style device
/// </summary>
LeftEye = 0,
/// <summary>
/// The right eye of an HMD style device
/// </summary>
RightEye = 1,
/// <summary>
/// The center eye of an HMD style device; this is usually the default for most HMDs
/// </summary>
Center = 2,
/// <summary>
/// The head of an HMD style device
/// </summary>
Head = 3,
/// <summary>
/// The left hand controller pose
/// </summary>
LeftPose = 4,
/// <summary>
/// The right hand controller pose
/// </summary>
RightPose = 5,
/// <summary>
/// The color camera of a mobile device
/// </summary>
ColorCamera = 6,
/// <summary>
/// No Longer Used
/// </summary>
DepthCameraDeprecated = 7,
/// <summary>
/// No Longer Used
/// </summary>
FisheyeCameraDeprected = 8,
/// <summary>
/// No Longer Used
/// </summary>
DeviceDeprecated = 9,
/// <summary>
/// The pose of a mobile remote
/// </summary>
RemotePose = 10,
}
[SerializeField]
DeviceType m_Device;
/// <summary>
/// This is used to indicate which device type the TrackedPoseDriver is currently tracking.
/// </summary>
public DeviceType deviceType
{
get { return m_Device; }
internal set { m_Device = value; }
}
[SerializeField]
TrackedPose m_PoseSource = TrackedPoseDriver.TrackedPose.Center;
/// <summary>
/// The pose being tracked by the tracked pose driver
/// </summary>
public TrackedPose poseSource
{
get { return m_PoseSource; }
internal set { m_PoseSource = value; }
}
/// <summary>
/// This method is used to set the device / pose pair for the SpatialTracking.TrackedPoseDriver. Setting an invalid combination of these values will return false.
/// </summary>
/// <param name="deviceType">The device type that we wish to track </param>
/// <param name="pose">The pose source that we wish to track</param>
/// <returns>true if the values provided are sensible, otherwise false</returns>
public bool SetPoseSource(DeviceType deviceType, TrackedPose pose)
{
if ((int)deviceType < TrackedPoseDriverDataDescription.DeviceData.Count)
{
TrackedPoseDriverDataDescription.PoseData val = TrackedPoseDriverDataDescription.DeviceData[(int)deviceType];
for (int i = 0; i < val.Poses.Count; ++i)
{
if (val.Poses[i] == pose)
{
this.deviceType = deviceType;
poseSource = pose;
return true;
}
}
}
return false;
}
[SerializeField]
BasePoseProvider m_PoseProviderComponent;
/// <summary>
/// Optional: This field holds the reference to the BasePoseProvider instance that, if set, will be used to override the behavior of
/// the TrackedPoseDriver. When this field is empty, the TrackedPoseDriver will operate as per usual, with pose data being
/// retrieved from the device or pose settings of the TrackedPoseDriver. When this field is set, the pose data will be
/// provided by the attached BasePoseProvider. The device or pose fields will be hidden as they are no longer used to
/// control the parent GameObject Transform.
/// </summary>
public BasePoseProvider poseProviderComponent
{
get { return m_PoseProviderComponent; }
set { m_PoseProviderComponent = value; }
}
PoseDataFlags GetPoseData(DeviceType device, TrackedPose poseSource, out Pose resultPose)
{
return m_PoseProviderComponent != null
? m_PoseProviderComponent.GetPoseFromProvider(out resultPose)
: PoseDataSource.GetDataFromSource(poseSource, out resultPose);
}
/// <summary>
/// This enum is used to indicate which parts of the pose will be applied to the parent transform
/// </summary>
public enum TrackingType
{
/// <summary>
/// With this setting, both the pose's rotation and position will be applied to the parent transform
/// </summary>
RotationAndPosition,
/// <summary>
/// With this setting, only the pose's rotation will be applied to the parent transform
/// </summary>
RotationOnly,
/// <summary>
/// With this setting, only the pose's position will be applied to the parent transform
/// </summary>
PositionOnly
}
[SerializeField]
TrackingType m_TrackingType;
/// <summary>
/// The tracking type being used by the tracked pose driver
/// </summary>
public TrackingType trackingType
{
get { return m_TrackingType; }
set { m_TrackingType = value; }
}
/// <summary>
/// The update type being used by the tracked pose driver
/// </summary>
public enum UpdateType
{
/// <summary>
/// Sample input at both update, and directly before rendering. For smooth head pose tracking,
/// we recommend using this value as it will provide the lowest input latency for the device.
/// This is the default value for the UpdateType option
/// </summary>
UpdateAndBeforeRender,
/// <summary>
/// Only sample input during the update phase of the frame.
/// </summary>
Update,
/// <summary>
/// Only sample input directly before rendering
/// </summary>
BeforeRender,
}
[SerializeField]
UpdateType m_UpdateType = UpdateType.UpdateAndBeforeRender;
/// <summary>
/// The update type being used by the tracked pose driver
/// </summary>
public UpdateType updateType
{
get { return m_UpdateType; }
set { m_UpdateType = value; }
}
[SerializeField]
bool m_UseRelativeTransform = false;
/// <summary>
/// This is used to indicate whether the TrackedPoseDriver will use the object's original transform as its basis.
/// </summary>
public bool UseRelativeTransform
{
get { return m_UseRelativeTransform; }
set { m_UseRelativeTransform = value; }
}
/// <summary>
/// The origin pose is the offset applied to any tracking data. This is only used when in legacy compatibility mode.
/// </summary>
protected Pose m_OriginPose;
/// <summary>
/// originPose is an offset applied to any tracking data read from this object.
/// Setting this value should be reserved for dealing with edge-cases, such as
/// achieving parity between room-scale (floor centered) and stationary (head centered)
/// tracking - without having to alter the transform hierarchy.
/// For user locomotion and gameplay purposes you are usually better off just
/// moving the parent transform of this object.
/// </summary>
public Pose originPose
{
get { return m_OriginPose; }
set { m_OriginPose = value; }
}
private void CacheLocalPosition()
{
m_OriginPose.position = transform.localPosition;
m_OriginPose.rotation = transform.localRotation;
}
private void ResetToCachedLocalPosition()
{
SetLocalTransform(m_OriginPose.position, m_OriginPose.rotation, PoseDataFlags.Position | PoseDataFlags.Rotation);
}
/// <inheritdoc />
protected virtual void Awake()
{
CacheLocalPosition();
#if UNITY_2019_3_OR_NEWER
// deprecated functionality in 2020.1
#elif ENABLE_AR || ENABLE_VR
if (HasStereoCamera())
{
XRDevice.DisableAutoXRCameraTracking(GetComponent<Camera>(), true);
}
#endif
}
/// <inheritdoc />
protected virtual void OnDestroy()
{
#if UNITY_2019_3_OR_NEWER
// deprecated functionality in 2020.1
#elif ENABLE_AR || ENABLE_VR
if (HasStereoCamera())
{
XRDevice.DisableAutoXRCameraTracking(GetComponent<Camera>(), false);
}
#endif
}
/// <inheritdoc />
protected virtual void OnEnable()
{
Application.onBeforeRender += OnBeforeRender;
}
/// <inheritdoc />
protected virtual void OnDisable()
{
// remove delegate registration
ResetToCachedLocalPosition();
Application.onBeforeRender -= OnBeforeRender;
}
/// <inheritdoc />
protected virtual void FixedUpdate()
{
if (m_UpdateType == UpdateType.Update ||
m_UpdateType == UpdateType.UpdateAndBeforeRender)
{
PerformUpdate();
}
}
/// <inheritdoc />
protected virtual void Update()
{
if (m_UpdateType == UpdateType.Update ||
m_UpdateType == UpdateType.UpdateAndBeforeRender)
{
PerformUpdate();
}
}
/// <inheritdoc />
// For the same reason as DefaultExecutionOrder, a callback order is specified to
// apply the pose to the Transform before default user scripts execute.
[BeforeRenderOrder(-30000)]
protected virtual void OnBeforeRender()
{
if (m_UpdateType == UpdateType.BeforeRender ||
m_UpdateType == UpdateType.UpdateAndBeforeRender)
{
PerformUpdate();
}
}
/// <summary>
/// Sets the transform that is being driven by the <see cref="TrackedPoseDriver"/>. Only the rotation or the position will be set, depending on the <see cref="PoseDataFlags"/>.
/// </summary>
/// <param name="newPosition">The position to apply.</param>
/// <param name="newRotation">The rotation to apply.</param>
/// <param name="poseFlags">The flags indiciating which of the position/rotation values are provided by the calling code.</param>
protected virtual void SetLocalTransform(Vector3 newPosition, Quaternion newRotation, PoseDataFlags poseFlags)
{
if ((m_TrackingType == TrackingType.RotationAndPosition ||
m_TrackingType == TrackingType.RotationOnly) &&
(poseFlags & PoseDataFlags.Rotation) > 0)
{
transform.localRotation = newRotation;
}
if ((m_TrackingType == TrackingType.RotationAndPosition ||
m_TrackingType == TrackingType.PositionOnly) &&
(poseFlags & PoseDataFlags.Position) > 0)
{
transform.localPosition = newPosition;
}
}
/// <summary>
/// This is only used when running in legacy mode, and will fake the behavior of the old implicit camera tracking. This will transform by the origin pose if necessary.
/// </summary>
/// <param name="pose">Pose to transform by the origin if in relative transform mode.</param>
/// <returns>The pose, with the applied transform if in Relative Transform mode.</returns>
protected Pose TransformPoseByOriginIfNeeded(Pose pose)
{
if (m_UseRelativeTransform)
{
return pose.GetTransformedBy(m_OriginPose);
}
else
{
return pose;
}
}
private bool HasStereoCamera()
{
Camera camera = GetComponent<Camera>();
return camera != null && camera.stereoEnabled;
}
/// <summary>
/// PerformUpdate queries the data from the selected pose source, and then calls <see cref="SetLocalTransform"/> to apply the pose.
/// </summary>
protected virtual void PerformUpdate()
{
if (!enabled)
return;
Pose currentPose;
PoseDataFlags poseFlags = GetPoseData(m_Device, m_PoseSource, out currentPose);
if (poseFlags != PoseDataFlags.NoData)
{
Pose localPose = TransformPoseByOriginIfNeeded(currentPose);
SetLocalTransform(localPose.position, localPose.rotation, poseFlags);
}
}
}
}
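A minimal usage sketch (not part of this commit; attaching to the main camera is an assumption). GenericXRDevice with the Center pose is the typical HMD configuration, and UpdateAndBeforeRender gives the lowest perceived latency.

// Hypothetical setup script for head tracking on the main camera.
using UnityEngine;
using UnityEngine.SpatialTracking;

public class HmdCameraSetupExample : MonoBehaviour
{
    void Awake()
    {
        var driver = Camera.main.gameObject.AddComponent<TrackedPoseDriver>();
        driver.SetPoseSource(TrackedPoseDriver.DeviceType.GenericXRDevice,
                             TrackedPoseDriver.TrackedPose.Center);
        driver.trackingType = TrackedPoseDriver.TrackingType.RotationAndPosition;
        driver.updateType = TrackedPoseDriver.UpdateType.UpdateAndBeforeRender;
    }
}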

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5a2a9c34df4095f47b9ca8f975175f5b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,6 @@
{
"name": "UnityEngine.SpatialTracking",
"references": [],
"includePlatforms": [],
"excludePlatforms": []
}

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: ba171b3dd2a51234ab864770f99741a5
AssemblyDefinitionImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant: