Assignment for RMIT Mixed Reality in 2020
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

400 lines
12 KiB

  1. /************************************************************************************
  2. Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
  3. Licensed under the Oculus Utilities SDK License Version 1.31 (the "License"); you may not use
  4. the Utilities SDK except in compliance with the License, which is provided at the time of installation
  5. or download, or which otherwise accompanies this software in either electronic or hard copy form.
  6. You may obtain a copy of the License at
  7. https://developer.oculus.com/licenses/utilities-1.31
  8. Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
  9. under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
  10. ANY KIND, either express or implied. See the License for the specific language governing
  11. permissions and limitations under the License.
  12. ************************************************************************************/
  13. using System;
  14. using System.Runtime.InteropServices;
  15. using System.Text.RegularExpressions;
  16. using UnityEngine;
  17. using System.Collections.Generic;
  18. #if UNITY_2017_2_OR_NEWER
  19. using InputTracking = UnityEngine.XR.InputTracking;
  20. using Node = UnityEngine.XR.XRNode;
  21. using NodeState = UnityEngine.XR.XRNodeState;
  22. using Settings = UnityEngine.XR.XRSettings;
  23. #elif UNITY_2017_1_OR_NEWER
  24. using InputTracking = UnityEngine.VR.InputTracking;
  25. using Node = UnityEngine.VR.VRNode;
  26. using NodeState = UnityEngine.VR.VRNodeState;
  27. using Settings = UnityEngine.VR.VRSettings;
  28. #else
  29. using Node = UnityEngine.VR.VRNode;
  30. using Settings = UnityEngine.VR.VRSettings;
  31. #endif
  32. /// <summary>
  33. /// Manages an Oculus Rift head-mounted display (HMD).
  34. /// </summary>
  35. public class OVRDisplay
  36. {
  37. /// <summary>
  38. /// Contains full fov information per eye
  39. /// Under Symmetric Fov mode, UpFov == DownFov and LeftFov == RightFov.
  40. /// </summary>
  41. public struct EyeFov
  42. {
  43. public float UpFov;
  44. public float DownFov;
  45. public float LeftFov;
  46. public float RightFov;
  47. }
  48. /// <summary>
  49. /// Specifies the size and field-of-view for one eye texture.
  50. /// </summary>
  51. public struct EyeRenderDesc
  52. {
  53. /// <summary>
  54. /// The horizontal and vertical size of the texture.
  55. /// </summary>
  56. public Vector2 resolution;
  57. /// <summary>
  58. /// The angle of the horizontal and vertical field of view in degrees.
  59. /// For Symmetric FOV interface compatibility
  60. /// Note this includes the fov angle from both sides
  61. /// </summary>
  62. public Vector2 fov;
  63. /// <summary>
  64. /// The full information of field of view in degrees.
  65. /// When Asymmetric FOV isn't enabled, this returns the maximum fov angle
  66. /// </summary>
  67. public EyeFov fullFov;
  68. }
  69. /// <summary>
  70. /// Contains latency measurements for a single frame of rendering.
  71. /// </summary>
  72. public struct LatencyData
  73. {
  74. /// <summary>
  75. /// The time it took to render both eyes in seconds.
  76. /// </summary>
  77. public float render;
  78. /// <summary>
  79. /// The time it took to perform TimeWarp in seconds.
  80. /// </summary>
  81. public float timeWarp;
  82. /// <summary>
  83. /// The time between the end of TimeWarp and scan-out in seconds.
  84. /// </summary>
  85. public float postPresent;
  86. public float renderError;
  87. public float timeWarpError;
  88. }
  89. private bool needsConfigureTexture;
  90. private EyeRenderDesc[] eyeDescs = new EyeRenderDesc[2];
  91. private bool recenterRequested = false;
  92. private int recenterRequestedFrameCount = int.MaxValue;
  93. private OVRPose previousRelativeTrackingSpacePose;
  94. private OVRManager.TrackingOrigin previousTrackingOrigin;
  95. /// <summary>
  96. /// Creates an instance of OVRDisplay. Called by OVRManager.
  97. /// </summary>
  98. public OVRDisplay()
  99. {
  100. UpdateTextures();
  101. if (OVRPlugin.GetSystemHeadsetType() == OVRPlugin.SystemHeadset.Oculus_Quest)
  102. {
  103. previousTrackingOrigin = OVRManager.instance.trackingOriginType;
  104. OVRManager.TrackingOrigin relativeOrigin = (previousTrackingOrigin != OVRManager.TrackingOrigin.Stage) ? OVRManager.TrackingOrigin.Stage : OVRManager.TrackingOrigin.EyeLevel;
  105. previousRelativeTrackingSpacePose = OVRPlugin.GetTrackingTransformRelativePose((OVRPlugin.TrackingOrigin)relativeOrigin).ToOVRPose();
  106. }
  107. }
  108. /// <summary>
  109. /// Updates the internal state of the OVRDisplay. Called by OVRManager.
  110. /// </summary>
  111. public void Update()
  112. {
  113. UpdateTextures();
  114. if (recenterRequested && Time.frameCount > recenterRequestedFrameCount)
  115. {
  116. if (RecenteredPose != null)
  117. {
  118. RecenteredPose();
  119. }
  120. recenterRequested = false;
  121. recenterRequestedFrameCount = int.MaxValue;
  122. }
  123. if (OVRPlugin.GetSystemHeadsetType() == OVRPlugin.SystemHeadset.Oculus_Quest)
  124. {
  125. OVRManager.TrackingOrigin relativeOrigin = (OVRManager.instance.trackingOriginType != OVRManager.TrackingOrigin.Stage) ? OVRManager.TrackingOrigin.Stage : OVRManager.TrackingOrigin.EyeLevel;
  126. OVRPose relativeTrackingSpacePose = OVRPlugin.GetTrackingTransformRelativePose((OVRPlugin.TrackingOrigin)relativeOrigin).ToOVRPose();
  127. //If the tracking origin type hasn't switched and the relative pose changes, a recenter occurred.
  128. if (previousTrackingOrigin == OVRManager.instance.trackingOriginType && previousRelativeTrackingSpacePose != relativeTrackingSpacePose && RecenteredPose != null)
  129. {
  130. RecenteredPose();
  131. }
  132. previousRelativeTrackingSpacePose = relativeTrackingSpacePose;
  133. previousTrackingOrigin = OVRManager.instance.trackingOriginType;
  134. }
  135. }
  136. /// <summary>
  137. /// Occurs when the head pose is reset.
  138. /// </summary>
  139. public event System.Action RecenteredPose;
  140. /// <summary>
  141. /// Recenters the head pose.
  142. /// </summary>
  143. public void RecenterPose()
  144. {
  145. #if UNITY_2017_2_OR_NEWER
  146. UnityEngine.XR.InputTracking.Recenter();
  147. #else
  148. UnityEngine.VR.InputTracking.Recenter();
  149. #endif
  150. // The current poses are cached for the current frame and won't be updated immediately
  151. // after UnityEngine.VR.InputTracking.Recenter(). So we need to wait until next frame
  152. // to trigger the RecenteredPose delegate. The application could expect the correct pose
  153. // when the RecenteredPose delegate get called.
  154. recenterRequested = true;
  155. recenterRequestedFrameCount = Time.frameCount;
  156. #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
  157. OVRMixedReality.RecenterPose();
  158. #endif
  159. }
  160. /// <summary>
  161. /// Gets the current linear acceleration of the head.
  162. /// </summary>
  163. public Vector3 acceleration
  164. {
  165. get {
  166. if (!OVRManager.isHmdPresent)
  167. return Vector3.zero;
  168. Vector3 retVec = Vector3.zero;
  169. if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Acceleration, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
  170. return retVec;
  171. return Vector3.zero;
  172. }
  173. }
  174. /// <summary>
  175. /// Gets the current angular acceleration of the head in radians per second per second about each axis.
  176. /// </summary>
  177. public Vector3 angularAcceleration
  178. {
  179. get
  180. {
  181. if (!OVRManager.isHmdPresent)
  182. return Vector3.zero;
  183. Vector3 retVec = Vector3.zero;
  184. if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.AngularAcceleration, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
  185. return retVec;
  186. return Vector3.zero;
  187. }
  188. }
  189. /// <summary>
  190. /// Gets the current linear velocity of the head in meters per second.
  191. /// </summary>
  192. public Vector3 velocity
  193. {
  194. get
  195. {
  196. if (!OVRManager.isHmdPresent)
  197. return Vector3.zero;
  198. Vector3 retVec = Vector3.zero;
  199. if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Velocity, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
  200. return retVec;
  201. return Vector3.zero;
  202. }
  203. }
  204. /// <summary>
  205. /// Gets the current angular velocity of the head in radians per second about each axis.
  206. /// </summary>
  207. public Vector3 angularVelocity
  208. {
  209. get {
  210. if (!OVRManager.isHmdPresent)
  211. return Vector3.zero;
  212. Vector3 retVec = Vector3.zero;
  213. if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.AngularVelocity, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
  214. return retVec;
  215. return Vector3.zero;
  216. }
  217. }
  218. /// <summary>
  219. /// Gets the resolution and field of view for the given eye.
  220. /// </summary>
  221. #if UNITY_2017_2_OR_NEWER
  222. public EyeRenderDesc GetEyeRenderDesc(UnityEngine.XR.XRNode eye)
  223. #else
  224. public EyeRenderDesc GetEyeRenderDesc(UnityEngine.VR.VRNode eye)
  225. #endif
  226. {
  227. return eyeDescs[(int)eye];
  228. }
  229. /// <summary>
  230. /// Gets the current measured latency values.
  231. /// </summary>
  232. public LatencyData latency
  233. {
  234. get {
  235. if (!OVRManager.isHmdPresent)
  236. return new LatencyData();
  237. string latency = OVRPlugin.latency;
  238. var r = new Regex("Render: ([0-9]+[.][0-9]+)ms, TimeWarp: ([0-9]+[.][0-9]+)ms, PostPresent: ([0-9]+[.][0-9]+)ms", RegexOptions.None);
  239. var ret = new LatencyData();
  240. Match match = r.Match(latency);
  241. if (match.Success)
  242. {
  243. ret.render = float.Parse(match.Groups[1].Value);
  244. ret.timeWarp = float.Parse(match.Groups[2].Value);
  245. ret.postPresent = float.Parse(match.Groups[3].Value);
  246. }
  247. return ret;
  248. }
  249. }
  250. /// <summary>
  251. /// Gets application's frame rate reported by oculus plugin
  252. /// </summary>
  253. public float appFramerate
  254. {
  255. get
  256. {
  257. if (!OVRManager.isHmdPresent)
  258. return 0;
  259. return OVRPlugin.GetAppFramerate();
  260. }
  261. }
  262. /// <summary>
  263. /// Gets the recommended MSAA level for optimal quality/performance the current device.
  264. /// </summary>
  265. public int recommendedMSAALevel
  266. {
  267. get
  268. {
  269. int result = OVRPlugin.recommendedMSAALevel;
  270. if (result == 1)
  271. result = 0;
  272. return result;
  273. }
  274. }
  275. /// <summary>
  276. /// Gets the list of available display frequencies supported by this hardware.
  277. /// </summary>
  278. public float[] displayFrequenciesAvailable
  279. {
  280. get { return OVRPlugin.systemDisplayFrequenciesAvailable; }
  281. }
  282. /// <summary>
  283. /// Gets and sets the current display frequency.
  284. /// </summary>
  285. public float displayFrequency
  286. {
  287. get
  288. {
  289. return OVRPlugin.systemDisplayFrequency;
  290. }
  291. set
  292. {
  293. OVRPlugin.systemDisplayFrequency = value;
  294. }
  295. }
  296. private void UpdateTextures()
  297. {
  298. #if UNITY_2017_2_OR_NEWER
  299. ConfigureEyeDesc(UnityEngine.XR.XRNode.LeftEye);
  300. ConfigureEyeDesc(UnityEngine.XR.XRNode.RightEye);
  301. #else
  302. ConfigureEyeDesc(UnityEngine.VR.VRNode.LeftEye);
  303. ConfigureEyeDesc(UnityEngine.VR.VRNode.RightEye);
  304. #endif
  305. }
  306. #if UNITY_2017_2_OR_NEWER
  307. private void ConfigureEyeDesc(UnityEngine.XR.XRNode eye)
  308. #else
  309. private void ConfigureEyeDesc(UnityEngine.VR.VRNode eye)
  310. #endif
  311. {
  312. if (!OVRManager.isHmdPresent)
  313. return;
  314. int eyeTextureWidth = Settings.eyeTextureWidth;
  315. int eyeTextureHeight = Settings.eyeTextureHeight;
  316. eyeDescs[(int)eye] = new EyeRenderDesc();
  317. eyeDescs[(int)eye].resolution = new Vector2(eyeTextureWidth, eyeTextureHeight);
  318. OVRPlugin.Frustumf2 frust;
  319. if (OVRPlugin.GetNodeFrustum2((OVRPlugin.Node)eye, out frust))
  320. {
  321. eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.LeftTan);
  322. eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.RightTan);
  323. eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.UpTan);
  324. eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.DownTan);
  325. }
  326. else
  327. {
  328. OVRPlugin.Frustumf frustOld = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);
  329. eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
  330. eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
  331. eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
  332. eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
  333. }
  334. // Symmetric Fov uses the maximum fov angle
  335. float maxFovX = Mathf.Max(eyeDescs[(int)eye].fullFov.LeftFov, eyeDescs[(int)eye].fullFov.RightFov);
  336. float maxFovY = Mathf.Max(eyeDescs[(int)eye].fullFov.UpFov, eyeDescs[(int)eye].fullFov.DownFov);
  337. eyeDescs[(int)eye].fov.x = maxFovX * 2.0f;
  338. eyeDescs[(int)eye].fov.y = maxFovY * 2.0f;
  339. if (!OVRPlugin.AsymmetricFovEnabled)
  340. {
  341. eyeDescs[(int)eye].fullFov.LeftFov = maxFovX;
  342. eyeDescs[(int)eye].fullFov.RightFov = maxFovX;
  343. eyeDescs[(int)eye].fullFov.UpFov = maxFovY;
  344. eyeDescs[(int)eye].fullFov.DownFov = maxFovY;
  345. }
  346. }
  347. }