Assignment for RMIT Mixed Reality in 2020

/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.

Licensed under the Oculus Utilities SDK License Version 1.31 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.

You may obtain a copy of the License at
https://developer.oculus.com/licenses/utilities-1.31

Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using System.Runtime.InteropServices;
using UnityEngine;
using VR = UnityEngine.VR;

/// <summary>
/// An infrared camera that tracks the position of a head-mounted display.
/// </summary>
public class OVRTracker
{
	/// <summary>
	/// The (symmetric) visible area in front of the sensor.
	/// </summary>
	public struct Frustum
	{
		/// <summary>
		/// The sensor's minimum supported distance to the HMD.
		/// </summary>
		public float nearZ;

		/// <summary>
		/// The sensor's maximum supported distance to the HMD.
		/// </summary>
		public float farZ;

		/// <summary>
		/// The sensor's horizontal and vertical fields of view in degrees.
		/// </summary>
		public Vector2 fov;
	}
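	// Illustrative note (not part of the original SDK source): for this symmetric frustum,
	// the visible width at a distance of d meters from the sensor is roughly
	// 2f * d * Mathf.Tan(fov.x * 0.5f * Mathf.Deg2Rad), and likewise for the height using fov.y.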
	/// <summary>
	/// If true, a sensor is attached to the system.
	/// </summary>
	public bool isPresent
	{
		get {
			if (!OVRManager.isHmdPresent)
				return false;

			return OVRPlugin.positionSupported;
		}
	}
	/// <summary>
	/// If true, the sensor is actively tracking the HMD's position. Otherwise the HMD may be
	/// temporarily occluded, the system may not support position tracking, etc.
	/// </summary>
	public bool isPositionTracked
	{
		get {
			return OVRPlugin.positionTracked;
		}
	}
	/// <summary>
	/// If this is true and a sensor is available, the system will use position tracking when
	/// isPositionTracked is also true.
	/// </summary>
	public bool isEnabled
	{
		get {
			if (!OVRManager.isHmdPresent)
				return false;

			return OVRPlugin.position;
		}

		set {
			if (!OVRManager.isHmdPresent)
				return;

			OVRPlugin.position = value;
		}
	}
	/// <summary>
	/// Returns the number of sensors currently connected to the system.
	/// </summary>
	public int count
	{
		get {
			int count = 0;

			for (int i = 0; i < (int)OVRPlugin.Tracker.Count; ++i)
			{
				if (GetPresent(i))
					count++;
			}

			return count;
		}
	}
	/// <summary>
	/// Gets the sensor's viewing frustum.
	/// </summary>
	public Frustum GetFrustum(int tracker = 0)
	{
		if (!OVRManager.isHmdPresent)
			return new Frustum();

		return OVRPlugin.GetTrackerFrustum((OVRPlugin.Tracker)tracker).ToFrustum();
	}
	/// <summary>
	/// Gets the sensor's pose, relative to the head's pose at the time of the last pose recentering.
	/// </summary>
	public OVRPose GetPose(int tracker = 0)
	{
		if (!OVRManager.isHmdPresent)
			return OVRPose.identity;

		OVRPose p;
		switch (tracker)
		{
			case 0:
				p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerZero, OVRPlugin.Step.Render).ToOVRPose();
				break;
			case 1:
				p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerOne, OVRPlugin.Step.Render).ToOVRPose();
				break;
			case 2:
				p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerTwo, OVRPlugin.Step.Render).ToOVRPose();
				break;
			case 3:
				p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerThree, OVRPlugin.Step.Render).ToOVRPose();
				break;
			default:
				return OVRPose.identity;
		}

		// The reported orientation is rotated 180 degrees about the vertical axis before being returned.
		return new OVRPose()
		{
			position = p.position,
			orientation = p.orientation * Quaternion.Euler(0, 180, 0)
		};
	}
	/// <summary>
	/// If true, the pose of the sensor is valid and is ready to be queried.
	/// </summary>
	public bool GetPoseValid(int tracker = 0)
	{
		if (!OVRManager.isHmdPresent)
			return false;

		switch (tracker)
		{
			case 0:
				return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerZero);
			case 1:
				return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerOne);
			case 2:
				return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerTwo);
			case 3:
				return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerThree);
			default:
				return false;
		}
	}
	/// <summary>
	/// If true, the specified sensor is connected to the system and reported as present.
	/// </summary>
	public bool GetPresent(int tracker = 0)
	{
		if (!OVRManager.isHmdPresent)
			return false;

		switch (tracker)
		{
			case 0:
				return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerZero);
			case 1:
				return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerOne);
			case 2:
				return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerTwo);
			case 3:
				return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerThree);
			default:
				return false;
		}
	}
}
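
The following is a minimal usage sketch, not part of the SDK file above. It assumes a Unity project with the Oculus Utilities imported (so OVRPose and OVRPlugin are available); the component name TrackerStatusLogger and the log messages are illustrative only. OVRTracker holds no state of its own, so constructing it directly is sufficient here, although projects usually reach a shared instance through OVRManager.

using UnityEngine;

// Hypothetical example component: logs what the OVRTracker API reports at startup.
public class TrackerStatusLogger : MonoBehaviour
{
	void Start()
	{
		OVRTracker tracker = new OVRTracker();

		if (!tracker.isPresent)
		{
			Debug.Log("No position-tracking sensor detected.");
			return;
		}

		Debug.Log("Connected sensors: " + tracker.count);

		// The frustum describes the volume (meters and degrees) the first sensor can see.
		OVRTracker.Frustum frustum = tracker.GetFrustum(0);
		Debug.Log(string.Format("Sensor 0 range: {0}-{1} m, FOV: {2} deg",
			frustum.nearZ, frustum.farZ, frustum.fov));

		// Poses are reported relative to the head pose captured at the last recentering.
		if (tracker.GetPoseValid(0))
		{
			OVRPose pose = tracker.GetPose(0);
			Debug.Log("Sensor 0 position: " + pose.position);
		}
	}
}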