Assignment for RMIT Mixed Reality in 2020
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

2016 lines
55 KiB

  1. /************************************************************************************
  2. Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
  3. Licensed under the Oculus Utilities SDK License Version 1.31 (the "License"); you may not use
  4. the Utilities SDK except in compliance with the License, which is provided at the time of installation
  5. or download, or which otherwise accompanies this software in either electronic or hard copy form.
  6. You may obtain a copy of the License at
  7. https://developer.oculus.com/licenses/utilities-1.31
  8. Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
  9. under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
  10. ANY KIND, either express or implied. See the License for the specific language governing
  11. permissions and limitations under the License.
  12. ************************************************************************************/
  13. #if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
  14. #define USING_XR_SDK
  15. #endif
  16. #if UNITY_ANDROID && !UNITY_EDITOR
  17. #define OVR_ANDROID_MRC
  18. #endif
  19. #if !UNITY_5_6_OR_NEWER
  20. #error Oculus Utilities require Unity 5.6 or higher.
  21. #endif
  22. using System;
  23. using System.Collections.Generic;
  24. using UnityEngine;
  25. #if UNITY_EDITOR
  26. using UnityEditor;
  27. #endif
  28. #if USING_XR_SDK
  29. using UnityEngine.XR;
  30. using UnityEngine.Experimental.XR;
  31. #endif
  32. #if UNITY_2017_2_OR_NEWER
  33. using Settings = UnityEngine.XR.XRSettings;
  34. using Node = UnityEngine.XR.XRNode;
  35. #else
  36. using Settings = UnityEngine.VR.VRSettings;
  37. using Node = UnityEngine.VR.VRNode;
  38. #endif
  39. /// <summary>
  40. /// Configuration data for Oculus virtual reality.
  41. /// </summary>
  42. public class OVRManager : MonoBehaviour
  43. {
/// <summary>
/// Reference frame for reported poses. Values mirror OVRPlugin.TrackingOrigin
/// so they can be cast straight through to the native plugin.
/// </summary>
public enum TrackingOrigin
{
    EyeLevel = OVRPlugin.TrackingOrigin.EyeLevel,
    FloorLevel = OVRPlugin.TrackingOrigin.FloorLevel,
    Stage = OVRPlugin.TrackingOrigin.Stage,
}
/// <summary>
/// Pixel format for the eye render textures. Values mirror OVRPlugin.EyeTextureFormat
/// so they can be cast straight through to the native plugin.
/// </summary>
public enum EyeTextureFormat
{
    Default = OVRPlugin.EyeTextureFormat.Default,
    R16G16B16A16_FP = OVRPlugin.EyeTextureFormat.R16G16B16A16_FP,
    R11G11B10_FP = OVRPlugin.EyeTextureFormat.R11G11B10_FP,
}
/// <summary>
/// Strength of fixed foveated rendering. Values mirror
/// OVRPlugin.FixedFoveatedRenderingLevel so they can be cast straight through.
/// </summary>
public enum FixedFoveatedRenderingLevel
{
    Off = OVRPlugin.FixedFoveatedRenderingLevel.Off,
    Low = OVRPlugin.FixedFoveatedRenderingLevel.Low,
    Medium = OVRPlugin.FixedFoveatedRenderingLevel.Medium,
    High = OVRPlugin.FixedFoveatedRenderingLevel.High,
    HighTop = OVRPlugin.FixedFoveatedRenderingLevel.HighTop,
}
/// <summary>
/// Deprecated name for the foveation level; kept for source compatibility.
/// Values mirror OVRPlugin.TiledMultiResLevel.
/// </summary>
[Obsolete("Please use FixedFoveatedRenderingLevel instead")]
public enum TiledMultiResLevel
{
    Off = OVRPlugin.TiledMultiResLevel.Off,
    LMSLow = OVRPlugin.TiledMultiResLevel.LMSLow,
    LMSMedium = OVRPlugin.TiledMultiResLevel.LMSMedium,
    LMSHigh = OVRPlugin.TiledMultiResLevel.LMSHigh,
    LMSHighTop = OVRPlugin.TiledMultiResLevel.LMSHighTop,
}
/// <summary>
/// Identifies which XR runtime the app is currently running on
/// (stored in <see cref="loadedXRDevice"/>).
/// </summary>
public enum XRDevice
{
    Unknown = 0,
    Oculus = 1,
    OpenVR = 2,
}
/// <summary>
/// Gets the singleton instance.
/// </summary>
// NOTE(review): setters are private, so these four are read-only to external code;
// presumably assigned during this component's initialization — not visible in this chunk.
public static OVRManager instance { get; private set; }
/// <summary>
/// Gets a reference to the active display.
/// </summary>
public static OVRDisplay display { get; private set; }
/// <summary>
/// Gets a reference to the active sensor.
/// </summary>
public static OVRTracker tracker { get; private set; }
/// <summary>
/// Gets a reference to the active boundary system.
/// </summary>
public static OVRBoundary boundary { get; private set; }
  95. private static OVRProfile _profile;
  96. /// <summary>
  97. /// Gets the current profile, which contains information about the user's settings and body dimensions.
  98. /// </summary>
  99. public static OVRProfile profile
  100. {
  101. get {
  102. if (_profile == null)
  103. _profile = new OVRProfile();
  104. return _profile;
  105. }
  106. }
// Cameras that were disabled and the time scale in effect beforehand.
// NOTE(review): both fields are written/read elsewhere in the file (not visible
// in this chunk) — presumably around a pause/resume path; confirm at usage site.
private IEnumerable<Camera> disabledCameras;
float prevTimeScale;
/// <summary>
/// Occurs when an HMD attached.
/// </summary>
public static event Action HMDAcquired;
/// <summary>
/// Occurs when an HMD detached.
/// </summary>
public static event Action HMDLost;
/// <summary>
/// Occurs when an HMD is put on the user's head.
/// </summary>
public static event Action HMDMounted;
/// <summary>
/// Occurs when an HMD is taken off the user's head.
/// </summary>
public static event Action HMDUnmounted;
/// <summary>
/// Occurs when VR Focus is acquired.
/// </summary>
public static event Action VrFocusAcquired;
/// <summary>
/// Occurs when VR Focus is lost.
/// </summary>
public static event Action VrFocusLost;
/// <summary>
/// Occurs when Input Focus is acquired.
/// </summary>
public static event Action InputFocusAcquired;
/// <summary>
/// Occurs when Input Focus is lost.
/// </summary>
public static event Action InputFocusLost;
/// <summary>
/// Occurs when the active Audio Out device has changed and a restart is needed.
/// </summary>
public static event Action AudioOutChanged;
/// <summary>
/// Occurs when the active Audio In device has changed and a restart is needed.
/// </summary>
public static event Action AudioInChanged;
/// <summary>
/// Occurs when the sensor gained tracking.
/// </summary>
public static event Action TrackingAcquired;
/// <summary>
/// Occurs when the sensor lost tracking.
/// </summary>
public static event Action TrackingLost;
/// <summary>
/// Occurs when the Health &amp; Safety Warning is dismissed.
/// </summary>
//Disable the warning about it being unused. It's deprecated.
#pragma warning disable 0067
[Obsolete]
public static event Action HSWDismissed;
#pragma warning restore
  165. private static bool _isHmdPresentCached = false;
  166. private static bool _isHmdPresent = false;
  167. private static bool _wasHmdPresent = false;
  168. /// <summary>
  169. /// If true, a head-mounted display is connected and present.
  170. /// </summary>
  171. public static bool isHmdPresent
  172. {
  173. get {
  174. if (!_isHmdPresentCached)
  175. {
  176. _isHmdPresentCached = true;
  177. _isHmdPresent = OVRNodeStateProperties.IsHmdPresent();
  178. }
  179. return _isHmdPresent;
  180. }
  181. private set {
  182. _isHmdPresentCached = true;
  183. _isHmdPresent = value;
  184. }
  185. }
/// <summary>
/// Gets the audio output device identifier.
/// </summary>
/// <description>
/// On Windows, this is a string containing the GUID of the IMMDevice for the Windows audio endpoint to use.
/// </description>
public static string audioOutId
{
    get { return OVRPlugin.audioOutId; }
}
/// <summary>
/// Gets the audio input device identifier.
/// </summary>
/// <description>
/// On Windows, this is a string containing the GUID of the IMMDevice for the Windows audio endpoint to use.
/// </description>
public static string audioInId
{
    get { return OVRPlugin.audioInId; }
}
  206. private static bool _hasVrFocusCached = false;
  207. private static bool _hasVrFocus = false;
  208. private static bool _hadVrFocus = false;
  209. /// <summary>
  210. /// If true, the app has VR Focus.
  211. /// </summary>
  212. public static bool hasVrFocus
  213. {
  214. get {
  215. if (!_hasVrFocusCached)
  216. {
  217. _hasVrFocusCached = true;
  218. _hasVrFocus = OVRPlugin.hasVrFocus;
  219. }
  220. return _hasVrFocus;
  221. }
  222. private set {
  223. _hasVrFocusCached = true;
  224. _hasVrFocus = value;
  225. }
  226. }
// Previous input-focus state; presumably compared against hasInputFocus
// elsewhere to raise InputFocusAcquired/InputFocusLost — usage not visible here.
private static bool _hadInputFocus = true;
/// <summary>
/// If true, the app has Input Focus. Queried live from the plugin (not cached).
/// </summary>
public static bool hasInputFocus
{
    get
    {
        return OVRPlugin.hasInputFocus;
    }
}
  238. /// <summary>
  239. /// If true, chromatic de-aberration will be applied, improving the image at the cost of texture bandwidth.
  240. /// </summary>
  241. public bool chromatic
  242. {
  243. get {
  244. if (!isHmdPresent)
  245. return false;
  246. return OVRPlugin.chromatic;
  247. }
  248. set {
  249. if (!isHmdPresent)
  250. return;
  251. OVRPlugin.chromatic = value;
  252. }
  253. }
[Header("Performance/Quality")]
/// <summary>
/// If true, distortion rendering work is submitted a quarter-frame early to avoid pipeline stalls and increase CPU-GPU parallelism.
/// </summary>
[Tooltip("If true, distortion rendering work is submitted a quarter-frame early to avoid pipeline stalls and increase CPU-GPU parallelism.")]
public bool queueAhead = true;
/// <summary>
/// If true, Unity will use the optimal antialiasing level for quality/performance on the current hardware.
/// </summary>
[Tooltip("If true, Unity will use the optimal antialiasing level for quality/performance on the current hardware.")]
public bool useRecommendedMSAALevel = true;
/// <summary>
/// If true, both eyes will see the same image, rendered from the center eye pose, saving performance.
/// </summary>
// Serialized backing field for the monoscopic property; keeps the Editor value
// available when no HMD is present.
[SerializeField]
[Tooltip("If true, both eyes will see the same image, rendered from the center eye pose, saving performance.")]
private bool _monoscopic = false;
  271. public bool monoscopic
  272. {
  273. get
  274. {
  275. if (!isHmdPresent)
  276. return _monoscopic;
  277. return OVRPlugin.monoscopic;
  278. }
  279. set
  280. {
  281. if (!isHmdPresent)
  282. return;
  283. OVRPlugin.monoscopic = value;
  284. _monoscopic = value;
  285. }
  286. }
/// <summary>
/// If true, dynamic resolution will be enabled (PC only; see
/// IsAdaptiveResSupportedByEngine for engine-version caveats).
/// </summary>
[Tooltip("If true, dynamic resolution will be enabled On PC")]
public bool enableAdaptiveResolution = false;
  292. /// <summary>
  293. /// Adaptive Resolution is based on Unity engine's renderViewportScale/eyeTextureResolutionScale feature
  294. /// But renderViewportScale was broken in an array of Unity engines, this function help to filter out those broken engines
  295. /// </summary>
  296. public static bool IsAdaptiveResSupportedByEngine()
  297. {
  298. #if UNITY_2017_1_OR_NEWER
  299. return Application.unityVersion != "2017.1.0f1";
  300. #else
  301. return false;
  302. #endif
  303. }
/// <summary>
/// Min RenderScale the app can reach under adaptive resolution mode ( enableAdaptiveResolution = true );
/// </summary>
[RangeAttribute(0.5f, 2.0f)]
[Tooltip("Min RenderScale the app can reach under adaptive resolution mode")]
public float minRenderScale = 0.7f;
/// <summary>
/// Max RenderScale the app can reach under adaptive resolution mode ( enableAdaptiveResolution = true );
/// </summary>
[RangeAttribute(0.5f, 2.0f)]
[Tooltip("Max RenderScale the app can reach under adaptive resolution mode")]
public float maxRenderScale = 1.0f;
  316. /// <summary>
  317. /// Set the relative offset rotation of head poses
  318. /// </summary>
  319. [SerializeField]
  320. [Tooltip("Set the relative offset rotation of head poses")]
  321. private Vector3 _headPoseRelativeOffsetRotation;
  322. public Vector3 headPoseRelativeOffsetRotation
  323. {
  324. get
  325. {
  326. return _headPoseRelativeOffsetRotation;
  327. }
  328. set
  329. {
  330. OVRPlugin.Quatf rotation;
  331. OVRPlugin.Vector3f translation;
  332. if (OVRPlugin.GetHeadPoseModifier(out rotation, out translation))
  333. {
  334. Quaternion finalRotation = Quaternion.Euler(value);
  335. rotation = finalRotation.ToQuatf();
  336. OVRPlugin.SetHeadPoseModifier(ref rotation, ref translation);
  337. }
  338. _headPoseRelativeOffsetRotation = value;
  339. }
  340. }
  341. /// <summary>
  342. /// Set the relative offset translation of head poses
  343. /// </summary>
  344. [SerializeField]
  345. [Tooltip("Set the relative offset translation of head poses")]
  346. private Vector3 _headPoseRelativeOffsetTranslation;
  347. public Vector3 headPoseRelativeOffsetTranslation
  348. {
  349. get
  350. {
  351. return _headPoseRelativeOffsetTranslation;
  352. }
  353. set
  354. {
  355. OVRPlugin.Quatf rotation;
  356. OVRPlugin.Vector3f translation;
  357. if (OVRPlugin.GetHeadPoseModifier(out rotation, out translation))
  358. {
  359. if (translation.FromFlippedZVector3f() != value)
  360. {
  361. translation = value.ToFlippedZVector3f();
  362. OVRPlugin.SetHeadPoseModifier(ref rotation, ref translation);
  363. }
  364. }
  365. _headPoseRelativeOffsetTranslation = value;
  366. }
  367. }
/// <summary>
/// The TCP listening port of the Oculus Profiler Service, which is activated in Debug/Development builds.
/// When the app is running in the editor or on device, open "Tools/Oculus/Oculus Profiler Panel" to view real-time system metrics.
/// </summary>
public int profilerTcpPort = OVRSystemPerfMetrics.TcpListeningPort;
  373. #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
/// <summary>
/// If true, the MixedRealityCapture properties will be displayed (Editor UI state only).
/// </summary>
[HideInInspector]
public bool expandMixedRealityCapturePropertySheet = false;
/// <summary>
/// If true, Mixed Reality mode will be enabled
/// </summary>
[HideInInspector, Tooltip("If true, Mixed Reality mode will be enabled. It would be always set to false when the game is launching without editor")]
public bool enableMixedReality = false;
/// <summary>
/// How Mixed Reality Capture composes the final image: an external compositor
/// (e.g. OBS) or direct in-app composition with a camera feed.
/// </summary>
public enum CompositionMethod
{
    External,
    Direct
}
  389. /// <summary>
  390. /// Composition method
  391. /// </summary>
  392. [HideInInspector]
  393. public CompositionMethod compositionMethod = CompositionMethod.External;
  394. /// <summary>
  395. /// Extra hidden layers
  396. /// </summary>
  397. [HideInInspector, Tooltip("Extra hidden layers")]
  398. public LayerMask extraHiddenLayers;
  399. /// <summary>
  400. /// The backdrop color will be used when rendering the foreground frames (on Rift). It only applies to External Composition.
  401. /// </summary>
  402. [HideInInspector, Tooltip("Backdrop color for Rift (External Compositon)")]
  403. public Color externalCompositionBackdropColorRift = Color.green;
  404. /// <summary>
  405. /// The backdrop color will be used when rendering the foreground frames (on Quest). It only applies to External Composition.
  406. /// </summary>
  407. [HideInInspector, Tooltip("Backdrop color for Quest (External Compositon)")]
  408. public Color externalCompositionBackdropColorQuest = Color.clear;
/// <summary>
/// Physical camera sources available for direct composition.
/// (Previous summary here was copy-pasted from an unrelated flag.)
/// </summary>
public enum CameraDevice
{
    WebCamera0,
    WebCamera1,
    ZEDCamera
}
/// <summary>
/// The camera device for direct composition
/// </summary>
[HideInInspector, Tooltip("The camera device for direct composition")]
public CameraDevice capturingCameraDevice = CameraDevice.WebCamera0;
/// <summary>
/// Flip the camera frame horizontally
/// </summary>
[HideInInspector, Tooltip("Flip the camera frame horizontally")]
public bool flipCameraFrameHorizontally = false;
/// <summary>
/// Flip the camera frame vertically
/// </summary>
[HideInInspector, Tooltip("Flip the camera frame vertically")]
public bool flipCameraFrameVertically = false;
/// <summary>
/// Delay the touch controller pose by a short duration (0 to 0.5 second) to match the physical camera latency
/// </summary>
[HideInInspector, Tooltip("Delay the touch controller pose by a short duration (0 to 0.5 second) to match the physical camera latency")]
public float handPoseStateLatency = 0.0f;
/// <summary>
/// Delay the foreground / background image in the sandwich composition to match the physical camera latency. The maximum duration is sandwichCompositionBufferedFrames / {Game FPS}
/// </summary>
[HideInInspector, Tooltip("Delay the foreground / background image in the sandwich composition to match the physical camera latency. The maximum duration is sandwichCompositionBufferedFrames / {Game FPS}")]
public float sandwichCompositionRenderLatency = 0.0f;
/// <summary>
/// The number of frames are buffered in the SandWich composition. The more buffered frames, the more memory it would consume.
/// </summary>
[HideInInspector, Tooltip("The number of frames are buffered in the SandWich composition. The more buffered frames, the more memory it would consume.")]
public int sandwichCompositionBufferedFrames = 8;
/// <summary>
/// Chroma Key Color
/// </summary>
[HideInInspector, Tooltip("Chroma Key Color")]
public Color chromaKeyColor = Color.green;
/// <summary>
/// Chroma Key Similarity
/// </summary>
[HideInInspector, Tooltip("Chroma Key Similarity")]
public float chromaKeySimilarity = 0.60f;
/// <summary>
/// Chroma Key Smooth Range
/// </summary>
[HideInInspector, Tooltip("Chroma Key Smooth Range")]
public float chromaKeySmoothRange = 0.03f;
/// <summary>
/// Chroma Key Spill Range
/// </summary>
[HideInInspector, Tooltip("Chroma Key Spill Range")]
public float chromaKeySpillRange = 0.06f;
/// <summary>
/// Use dynamic lighting (Depth sensor required)
/// </summary>
[HideInInspector, Tooltip("Use dynamic lighting (Depth sensor required)")]
public bool useDynamicLighting = false;
/// <summary>
/// Quality levels for the depth image used by dynamic lighting.
/// </summary>
public enum DepthQuality
{
    Low,
    Medium,
    High
}
/// <summary>
/// The quality level of depth image. The lighting could be more smooth and accurate with high quality depth, but it would also be more costly in performance.
/// </summary>
[HideInInspector, Tooltip("The quality level of depth image. The lighting could be more smooth and accurate with high quality depth, but it would also be more costly in performance.")]
public DepthQuality depthQuality = DepthQuality.Medium;
/// <summary>
/// Smooth factor in dynamic lighting. Larger is smoother
/// </summary>
[HideInInspector, Tooltip("Smooth factor in dynamic lighting. Larger is smoother")]
public float dynamicLightingSmoothFactor = 8.0f;
/// <summary>
/// The maximum depth variation across the edges. Make it smaller to smooth the lighting on the edges.
/// </summary>
[HideInInspector, Tooltip("The maximum depth variation across the edges. Make it smaller to smooth the lighting on the edges.")]
public float dynamicLightingDepthVariationClampingValue = 0.001f;
  494. public enum VirtualGreenScreenType
  495. {
  496. Off,
  497. OuterBoundary,
  498. PlayArea
  499. }
  500. /// <summary>
  501. /// Set the current type of the virtual green screen
  502. /// </summary>
  503. [HideInInspector, Tooltip("Type of virutal green screen ")]
  504. public VirtualGreenScreenType virtualGreenScreenType = VirtualGreenScreenType.Off;
  505. /// <summary>
  506. /// Top Y of virtual screen
  507. /// </summary>
  508. [HideInInspector, Tooltip("Top Y of virtual green screen")]
  509. public float virtualGreenScreenTopY = 10.0f;
  510. /// <summary>
  511. /// Bottom Y of virtual screen
  512. /// </summary>
  513. [HideInInspector, Tooltip("Bottom Y of virtual green screen")]
  514. public float virtualGreenScreenBottomY = -10.0f;
  515. /// <summary>
  516. /// When using a depth camera (e.g. ZED), whether to use the depth in virtual green screen culling.
  517. /// </summary>
  518. [HideInInspector, Tooltip("When using a depth camera (e.g. ZED), whether to use the depth in virtual green screen culling.")]
  519. public bool virtualGreenScreenApplyDepthCulling = false;
  520. /// <summary>
  521. /// The tolerance value (in meter) when using the virtual green screen with a depth camera. Make it bigger if the foreground objects got culled incorrectly.
  522. /// </summary>
  523. [HideInInspector, Tooltip("The tolerance value (in meter) when using the virtual green screen with a depth camera. Make it bigger if the foreground objects got culled incorrectly.")]
  524. public float virtualGreenScreenDepthTolerance = 0.2f;
/// <summary>
/// Whether Mixed Reality Capture may be started remotely (Quest).
/// </summary>
public enum MrcActivationMode
{
    Automatic,
    Disabled
}
/// <summary>
/// (Quest-only) control if the mixed reality capture mode can be activated automatically through remote network connection.
/// </summary>
[HideInInspector, Tooltip("(Quest-only) control if the mixed reality capture mode can be activated automatically through remote network connection.")]
public MrcActivationMode mrcActivationMode;
  535. #endif
  536. /// <summary>
  537. /// The number of expected display frames per rendered frame.
  538. /// </summary>
  539. public int vsyncCount
  540. {
  541. get {
  542. if (!isHmdPresent)
  543. return 1;
  544. return OVRPlugin.vsyncCount;
  545. }
  546. set {
  547. if (!isHmdPresent)
  548. return;
  549. OVRPlugin.vsyncCount = value;
  550. }
  551. }
// Device-name strings as reported by Unity's XR subsystem.
public static string OCULUS_UNITY_NAME_STR = "Oculus";
public static string OPENVR_UNITY_NAME_STR = "OpenVR";
// Which XR runtime is active; presumably assigned during device initialization
// elsewhere in this file — not visible in this chunk.
public static XRDevice loadedXRDevice;
/// <summary>
/// Gets the current battery level.
/// </summary>
/// <returns>Battery level in the range [0.0, 1.0]; 1.0 when no HMD is present.</returns>
public static float batteryLevel
{
    get {
        if (!isHmdPresent)
            return 1f;
        return OVRPlugin.batteryLevel;
    }
}
/// <summary>
/// Gets the current battery temperature.
/// </summary>
/// <returns>Battery temperature in degrees Celsius; 0 when no HMD is present.</returns>
public static float batteryTemperature
{
    get {
        if (!isHmdPresent)
            return 0f;
        return OVRPlugin.batteryTemperature;
    }
}
/// <summary>
/// Gets the current battery status.
/// </summary>
/// <returns>Battery status cast from OVRPlugin.batteryStatus; -1 when no HMD is present.</returns>
public static int batteryStatus
{
    get {
        if (!isHmdPresent)
            return -1;
        return (int)OVRPlugin.batteryStatus;
    }
}
/// <summary>
/// Gets the current volume level.
/// </summary>
/// <returns>Volume level in the range [0, 1]; 0 when no HMD is present.</returns>
public static float volumeLevel
{
    get {
        if (!isHmdPresent)
            return 0f;
        return OVRPlugin.systemVolume;
    }
}
  606. /// <summary>
  607. /// Gets or sets the current CPU performance level (0-2). Lower performance levels save more power.
  608. /// </summary>
  609. public static int cpuLevel
  610. {
  611. get {
  612. if (!isHmdPresent)
  613. return 2;
  614. return OVRPlugin.cpuLevel;
  615. }
  616. set {
  617. if (!isHmdPresent)
  618. return;
  619. OVRPlugin.cpuLevel = value;
  620. }
  621. }
  622. /// <summary>
  623. /// Gets or sets the current GPU performance level (0-2). Lower performance levels save more power.
  624. /// </summary>
  625. public static int gpuLevel
  626. {
  627. get {
  628. if (!isHmdPresent)
  629. return 2;
  630. return OVRPlugin.gpuLevel;
  631. }
  632. set {
  633. if (!isHmdPresent)
  634. return;
  635. OVRPlugin.gpuLevel = value;
  636. }
  637. }
  638. /// <summary>
  639. /// If true, the CPU and GPU are currently throttled to save power and/or reduce the temperature.
  640. /// </summary>
  641. public static bool isPowerSavingActive
  642. {
  643. get {
  644. if (!isHmdPresent)
  645. return false;
  646. return OVRPlugin.powerSaving;
  647. }
  648. }
  649. /// <summary>
  650. /// Gets or sets the eye texture format.
  651. /// </summary>
  652. public static EyeTextureFormat eyeTextureFormat
  653. {
  654. get
  655. {
  656. return (OVRManager.EyeTextureFormat)OVRPlugin.GetDesiredEyeTextureFormat();
  657. }
  658. set
  659. {
  660. OVRPlugin.SetDesiredEyeTextureFormat((OVRPlugin.EyeTextureFormat)value);
  661. }
  662. }
/// <summary>
/// Gets if the tile-based multi-resolution (fixed foveated rendering) technique is supported.
/// This feature is only supported on QCOMM-based Android devices.
/// </summary>
public static bool fixedFoveatedRenderingSupported
{
    get
    {
        return OVRPlugin.fixedFoveatedRenderingSupported;
    }
}
  674. /// <summary>
  675. /// Gets or sets the tiled-based multi-resolution level
  676. /// This feature is only supported on QCOMM-based Android devices
  677. /// </summary>
  678. public static FixedFoveatedRenderingLevel fixedFoveatedRenderingLevel
  679. {
  680. get
  681. {
  682. if (!OVRPlugin.fixedFoveatedRenderingSupported)
  683. {
  684. Debug.LogWarning("Fixed Foveated Rendering feature is not supported");
  685. }
  686. return (FixedFoveatedRenderingLevel)OVRPlugin.fixedFoveatedRenderingLevel;
  687. }
  688. set
  689. {
  690. if (!OVRPlugin.fixedFoveatedRenderingSupported)
  691. {
  692. Debug.LogWarning("Fixed Foveated Rendering feature is not supported");
  693. }
  694. OVRPlugin.fixedFoveatedRenderingLevel = (OVRPlugin.FixedFoveatedRenderingLevel)value;
  695. }
  696. }
  697. /// <summary>
  698. /// Let the system decide the best foveation level adaptively (Off .. fixedFoveatedRenderingLevel)
  699. /// This feature is only supported on QCOMM-based Android devices
  700. /// </summary>
  701. public static bool useDynamicFixedFoveatedRendering
  702. {
  703. get
  704. {
  705. if (!OVRPlugin.fixedFoveatedRenderingSupported)
  706. {
  707. Debug.LogWarning("Fixed Foveated Rendering feature is not supported");
  708. }
  709. return OVRPlugin.useDynamicFixedFoveatedRendering;
  710. }
  711. set
  712. {
  713. if (!OVRPlugin.fixedFoveatedRenderingSupported)
  714. {
  715. Debug.LogWarning("Fixed Foveated Rendering feature is not supported");
  716. }
  717. OVRPlugin.useDynamicFixedFoveatedRendering = value;
  718. }
  719. }
// Deprecated aliases kept for source compatibility; forward to the same plugin
// state as the fixedFoveatedRendering* members.
[Obsolete("Please use fixedFoveatedRenderingSupported instead", false)]
public static bool tiledMultiResSupported
{
    get
    {
        return OVRPlugin.tiledMultiResSupported;
    }
}
[Obsolete("Please use fixedFoveatedRenderingLevel instead", false)]
public static TiledMultiResLevel tiledMultiResLevel
{
    get
    {
        if (!OVRPlugin.tiledMultiResSupported)
        {
            Debug.LogWarning("Tiled-based Multi-resolution feature is not supported");
        }
        return (TiledMultiResLevel)OVRPlugin.tiledMultiResLevel;
    }
    set
    {
        if (!OVRPlugin.tiledMultiResSupported)
        {
            Debug.LogWarning("Tiled-based Multi-resolution feature is not supported");
        }
        OVRPlugin.tiledMultiResLevel = (OVRPlugin.TiledMultiResLevel)value;
    }
}
/// <summary>
/// Gets if the GPU Utility is supported
/// This feature is only supported on QCOMM-based Android devices
/// </summary>
public static bool gpuUtilSupported
{
    get
    {
        return OVRPlugin.gpuUtilSupported;
    }
}
/// <summary>
/// Gets the GPU utilization level (0.0 - 1.0).
/// This feature is only supported on QCOMM-based Android devices; when not
/// supported a warning is logged but the plugin value is still returned.
/// </summary>
public static float gpuUtilLevel
{
    get
    {
        if (!OVRPlugin.gpuUtilSupported)
        {
            Debug.LogWarning("GPU Util is not supported");
        }
        return OVRPlugin.gpuUtilLevel;
    }
}
/// <summary>
/// Sets the Color Scale and Offset which is commonly used for effects like fade-to-black.
/// In our compositor, once a given frame is rendered, warped, and ready to be displayed, we then multiply
/// each pixel by colorScale and add it to colorOffset, whereby newPixel = oldPixel * colorScale + colorOffset.
/// Note that for mobile devices (Quest, Go, etc.), colorOffset is not supported, so colorScale is all that can
/// be used. A colorScale of (1, 1, 1, 1) and colorOffset of (0, 0, 0, 0) will lead to an identity multiplication
/// and have no effect.
/// </summary>
/// <param name="colorScale">Per-channel multiplier applied to each composited pixel.</param>
/// <param name="colorOffset">Per-channel offset added after scaling (PC only).</param>
/// <param name="applyToAllLayers">If true, applies to all compositor layers, not just the eye buffers.</param>
public static void SetColorScaleAndOffset(Vector4 colorScale, Vector4 colorOffset, bool applyToAllLayers)
{
    OVRPlugin.SetColorScaleAndOffset(colorScale, colorOffset, applyToAllLayers);
}
/// <summary>
/// Specifies OpenVR controller poses local to tracking space.
/// No-op unless the loaded XR device is OpenVR.
/// </summary>
public static void SetOpenVRLocalPose(Vector3 leftPos, Vector3 rightPos, Quaternion leftRot, Quaternion rightRot)
{
    if (loadedXRDevice == XRDevice.OpenVR)
        OVRInput.SetOpenVRLocalPose(leftPos, rightPos, leftRot, rightRot);
}
// Series of offsets that line up the virtual controllers to the physical world.
// Used by GetOpenVRControllerOffset for Oculus Touch on OpenVR.
private static Vector3 OpenVRTouchRotationOffsetEulerLeft = new Vector3(40.0f, 0.0f, 0.0f);
private static Vector3 OpenVRTouchRotationOffsetEulerRight = new Vector3(40.0f, 0.0f, 0.0f);
private static Vector3 OpenVRTouchPositionOffsetLeft = new Vector3(0.0075f, -0.005f, -0.0525f);
private static Vector3 OpenVRTouchPositionOffsetRight = new Vector3(-0.0075f, -0.005f, -0.0525f);
  799. /// <summary>
  800. /// Specifies the pose offset required to make an OpenVR controller's reported pose match the virtual pose.
  801. /// Currently we only specify this offset for Oculus Touch on OpenVR.
  802. /// </summary>
  803. public static OVRPose GetOpenVRControllerOffset(Node hand)
  804. {
  805. OVRPose poseOffset = OVRPose.identity;
  806. if ((hand == Node.LeftHand || hand == Node.RightHand) && loadedXRDevice == XRDevice.OpenVR)
  807. {
  808. int index = (hand == Node.LeftHand) ? 0 : 1;
  809. if (OVRInput.openVRControllerDetails[index].controllerType == OVRInput.OpenVRController.OculusTouch)
  810. {
  811. Vector3 offsetOrientation = (hand == Node.LeftHand) ? OpenVRTouchRotationOffsetEulerLeft : OpenVRTouchRotationOffsetEulerRight;
  812. poseOffset.orientation = Quaternion.Euler(offsetOrientation.x, offsetOrientation.y, offsetOrientation.z);
  813. poseOffset.position = (hand == Node.LeftHand) ? OpenVRTouchPositionOffsetLeft : OpenVRTouchPositionOffsetRight;
  814. }
  815. }
  816. return poseOffset;
  817. }
  818. [Header("Tracking")]
  819. [SerializeField]
  820. [Tooltip("Defines the current tracking origin type.")]
  821. private OVRManager.TrackingOrigin _trackingOriginType = OVRManager.TrackingOrigin.EyeLevel;
  822. /// <summary>
  823. /// Defines the current tracking origin type.
  824. /// </summary>
  825. public OVRManager.TrackingOrigin trackingOriginType
  826. {
  827. get {
  828. if (!isHmdPresent)
  829. return _trackingOriginType;
  830. return (OVRManager.TrackingOrigin)OVRPlugin.GetTrackingOriginType();
  831. }
  832. set {
  833. if (!isHmdPresent)
  834. return;
  835. if (OVRPlugin.SetTrackingOriginType((OVRPlugin.TrackingOrigin)value))
  836. {
  837. // Keep the field exposed in the Unity Editor synchronized with any changes.
  838. _trackingOriginType = value;
  839. }
  840. }
  841. }
/// <summary>
/// If true, head tracking will affect the position of each OVRCameraRig's cameras.
/// </summary>
[Tooltip("If true, head tracking will affect the position of each OVRCameraRig's cameras.")]
public bool usePositionTracking = true;
/// <summary>
/// If true, head tracking will affect the rotation of each OVRCameraRig's cameras.
/// </summary>
// Hidden in the inspector: disabling rotation tracking is rarely desirable,
// but the flag is still applied to OVRPlugin.rotation every Update().
[HideInInspector]
public bool useRotationTracking = true;
/// <summary>
/// If true, the distance between the user's eyes will affect the position of each OVRCameraRig's cameras.
/// </summary>
[Tooltip("If true, the distance between the user's eyes will affect the position of each OVRCameraRig's cameras.")]
public bool useIPDInPositionTracking = true;
/// <summary>
/// If true, each scene load will cause the head pose to reset (checked once in InitOVRManager).
/// </summary>
[Tooltip("If true, each scene load will cause the head pose to reset.")]
public bool resetTrackerOnLoad = false;
/// <summary>
/// If true, the Reset View in the universal menu will cause the pose to be reset. This should generally be
/// enabled for applications with a stationary position in the virtual world and will allow the View Reset
/// command to place the person back to a predefined location (such as a cockpit seat).
/// Set this to false if you have a locomotion system because resetting the view would effectively teleport
/// the player to potentially invalid locations.
/// </summary>
[Tooltip("If true, the Reset View in the universal menu will cause the pose to be reset. This should generally be enabled for applications with a stationary position in the virtual world and will allow the View Reset command to place the person back to a predefined location (such as a cockpit seat). Set this to false if you have a locomotion system because resetting the view would effectively teleport the player to potentially invalid locations.")]
public bool AllowRecenter = true;
  871. [SerializeField]
  872. [Tooltip("Specifies HMD recentering behavior when controller recenter is performed. True recenters the HMD as well, false does not.")]
  873. private bool _reorientHMDOnControllerRecenter = true;
  874. /// <summary>
  875. /// Defines the recentering mode specified in the tooltip above.
  876. /// </summary>
  877. public bool reorientHMDOnControllerRecenter
  878. {
  879. get
  880. {
  881. if (!isHmdPresent)
  882. return false;
  883. return OVRPlugin.GetReorientHMDOnControllerRecenter();
  884. }
  885. set
  886. {
  887. if (!isHmdPresent)
  888. return;
  889. OVRPlugin.SetReorientHMDOnControllerRecenter(value);
  890. }
  891. }
/// <summary>
/// If true, a lower-latency update will occur right before rendering. If false, the only controller pose update will occur at the start of simulation for a given frame.
/// Selecting this option lowers rendered latency for controllers and is often a net positive; however, it also creates a slight disconnect between rendered and simulated controller poses.
/// Visit online Oculus documentation to learn more.
/// </summary>
[Tooltip("If true, rendered controller latency is reduced by several ms, as the left/right controllers will have their positions updated right before rendering.")]
public bool LateControllerUpdate = true;
/// <summary>
/// True if the current platform supports virtual reality.
/// Computed once from Application.platform in InitOVRManager().
/// </summary>
public bool isSupportedPlatform { get; private set; }
  903. private static bool _isUserPresentCached = false;
  904. private static bool _isUserPresent = false;
  905. private static bool _wasUserPresent = false;
  906. /// <summary>
  907. /// True if the user is currently wearing the display.
  908. /// </summary>
  909. public bool isUserPresent
  910. {
  911. get {
  912. if (!_isUserPresentCached)
  913. {
  914. _isUserPresentCached = true;
  915. _isUserPresent = OVRPlugin.userPresent;
  916. }
  917. return _isUserPresent;
  918. }
  919. private set {
  920. _isUserPresentCached = true;
  921. _isUserPresent = value;
  922. }
  923. }
// Previous-frame audio device ids, used by Update() to raise AudioOutChanged /
// AudioInChanged when the system audio devices change. The *IsCached flags
// distinguish "never sampled" from "sampled and equal".
private static bool prevAudioOutIdIsCached = false;
private static bool prevAudioInIdIsCached = false;
private static string prevAudioOutId = string.Empty;
private static string prevAudioInId = string.Empty;
// Previous-frame position-tracking state, used by Update() to raise
// TrackingLost / TrackingAcquired transitions.
private static bool wasPositionTracked = false;
  929. public static System.Version utilitiesVersion
  930. {
  931. get { return OVRPlugin.wrapperVersion; }
  932. }
  933. public static System.Version pluginVersion
  934. {
  935. get { return OVRPlugin.version; }
  936. }
  937. public static System.Version sdkVersion
  938. {
  939. get { return OVRPlugin.nativeSDKVersion; }
  940. }
  941. #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
  942. private static bool MixedRealityEnabledFromCmd()
  943. {
  944. var args = System.Environment.GetCommandLineArgs();
  945. for (int i = 0; i < args.Length; i++)
  946. {
  947. if (args[i].ToLower() == "-mixedreality")
  948. return true;
  949. }
  950. return false;
  951. }
  952. private static bool UseDirectCompositionFromCmd()
  953. {
  954. var args = System.Environment.GetCommandLineArgs();
  955. for (int i = 0; i < args.Length; i++)
  956. {
  957. if (args[i].ToLower() == "-directcomposition")
  958. return true;
  959. }
  960. return false;
  961. }
  962. private static bool UseExternalCompositionFromCmd()
  963. {
  964. var args = System.Environment.GetCommandLineArgs();
  965. for (int i = 0; i < args.Length; i++)
  966. {
  967. if (args[i].ToLower() == "-externalcomposition")
  968. return true;
  969. }
  970. return false;
  971. }
  972. private static bool CreateMixedRealityCaptureConfigurationFileFromCmd()
  973. {
  974. var args = System.Environment.GetCommandLineArgs();
  975. for (int i = 0; i < args.Length; i++)
  976. {
  977. if (args[i].ToLower() == "-create_mrc_config")
  978. return true;
  979. }
  980. return false;
  981. }
  982. private static bool LoadMixedRealityCaptureConfigurationFileFromCmd()
  983. {
  984. var args = System.Environment.GetCommandLineArgs();
  985. for (int i = 0; i < args.Length; i++)
  986. {
  987. if (args[i].ToLower() == "-load_mrc_config")
  988. return true;
  989. }
  990. return false;
  991. }
  992. #endif
  993. public static bool IsUnityAlphaOrBetaVersion()
  994. {
  995. string ver = Application.unityVersion;
  996. int pos = ver.Length - 1;
  997. while (pos >= 0 && ver[pos] >= '0' && ver[pos] <= '9')
  998. {
  999. --pos;
  1000. }
  1001. if (pos >= 0 && (ver[pos] == 'a' || ver[pos] == 'b'))
  1002. return true;
  1003. return false;
  1004. }
// Warning logged once at startup (outside the editor) when IsUnityAlphaOrBetaVersion() is true.
public static string UnityAlphaOrBetaVersionWarningMessage = "WARNING: It's not recommended to use Unity alpha/beta release in Oculus development. Use a stable release if you encounter any issue.";
  1006. #region Unity Messages
// Set by InitOVRManager() once initialization completes; cleared in OnDestroy().
// Under the XR SDK, Update() polls this to defer initialization until OVRPlugin is ready.
public static bool OVRManagerinitialized = false;
// One-time initialization of the OVRManager singleton: enforces the single-instance
// rule, logs version info, validates the platform/graphics device, configures Mixed
// Reality Capture from command-line switches (Windows), creates the display/tracker/
// boundary helpers, and starts the perf-metrics server in debug builds.
private void InitOVRManager()
{
    // Only allow one instance at runtime.
    if (instance != null)
    {
        enabled = false;
        DestroyImmediate(this);
        return;
    }
    instance = this;
    // uncomment the following line to disable the callstack printed to log
    //Application.SetStackTraceLogType(LogType.Log, StackTraceLogType.None); // TEMPORARY
    Debug.Log("Unity v" + Application.unityVersion + ", " +
        "Oculus Utilities v" + OVRPlugin.wrapperVersion + ", " +
        "OVRPlugin v" + OVRPlugin.version + ", " +
        "SDK v" + OVRPlugin.nativeSDKVersion + ".");
#if !UNITY_EDITOR
    if (IsUnityAlphaOrBetaVersion())
    {
        Debug.LogWarning(UnityAlphaOrBetaVersionWarningMessage);
    }
#endif
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
    // Only D3D11/D3D12 are supported for VR rendering on Windows; warn otherwise.
    var supportedTypes =
        UnityEngine.Rendering.GraphicsDeviceType.Direct3D11.ToString() + ", " +
        UnityEngine.Rendering.GraphicsDeviceType.Direct3D12.ToString();
    if (!supportedTypes.Contains(SystemInfo.graphicsDeviceType.ToString()))
        Debug.LogWarning("VR rendering requires one of the following device types: (" + supportedTypes + "). Your graphics device: " + SystemInfo.graphicsDeviceType.ToString());
#endif
    // Detect whether this platform is a supported platform
    RuntimePlatform currPlatform = Application.platform;
    if (currPlatform == RuntimePlatform.Android ||
        // currPlatform == RuntimePlatform.LinuxPlayer ||
        currPlatform == RuntimePlatform.OSXEditor ||
        currPlatform == RuntimePlatform.OSXPlayer ||
        currPlatform == RuntimePlatform.WindowsEditor ||
        currPlatform == RuntimePlatform.WindowsPlayer)
    {
        isSupportedPlatform = true;
    }
    else
    {
        isSupportedPlatform = false;
    }
    if (!isSupportedPlatform)
    {
        Debug.LogWarning("This platform is unsupported");
        return;
    }
#if UNITY_ANDROID && !UNITY_EDITOR
    // Turn off chromatic aberration by default to save texture bandwidth.
    chromatic = false;
#endif
#if (UNITY_STANDALONE_WIN || UNITY_ANDROID) && !UNITY_EDITOR
    enableMixedReality = false; // we should never start the standalone game in MxR mode, unless the command-line parameter is provided
#endif
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
    // Windows-only: configure Mixed Reality Capture from command-line switches
    // and (optionally) a persisted MRC configuration file.
    if (!staticMixedRealityCaptureInitialized)
    {
        bool loadMrcConfig = LoadMixedRealityCaptureConfigurationFileFromCmd();
        bool createMrcConfig = CreateMixedRealityCaptureConfigurationFileFromCmd();
        if (loadMrcConfig || createMrcConfig)
        {
            OVRMixedRealityCaptureSettings mrcSettings = ScriptableObject.CreateInstance<OVRMixedRealityCaptureSettings>();
            mrcSettings.ReadFrom(this);
            if (loadMrcConfig)
            {
                mrcSettings.CombineWithConfigurationFile();
                mrcSettings.ApplyTo(this);
            }
            if (createMrcConfig)
            {
                mrcSettings.WriteToConfigurationFile();
            }
            ScriptableObject.Destroy(mrcSettings);
        }
        if (MixedRealityEnabledFromCmd())
        {
            enableMixedReality = true;
        }
        if (enableMixedReality)
        {
            Debug.Log("OVR: Mixed Reality mode enabled");
            if (UseDirectCompositionFromCmd())
            {
                compositionMethod = CompositionMethod.Direct;
            }
            if (UseExternalCompositionFromCmd())
            {
                compositionMethod = CompositionMethod.External;
            }
            Debug.Log("OVR: CompositionMethod : " + compositionMethod);
        }
    }
#endif
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
    StaticInitializeMixedRealityCapture(this);
#endif
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
    if (enableAdaptiveResolution && !OVRManager.IsAdaptiveResSupportedByEngine())
    {
        enableAdaptiveResolution = false;
        UnityEngine.Debug.LogError("Your current Unity Engine " + Application.unityVersion + " might have issues to support adaptive resolution, please disable it under OVRManager");
    }
#endif
    Initialize();
    if (resetTrackerOnLoad)
        display.RecenterPose();
    if (Debug.isDebugBuild)
    {
        // Activate system metrics collection in Debug/Development build
        if (GetComponent<OVRSystemPerfMetrics.OVRSystemPerfMetricsTcpServer>() == null)
        {
            gameObject.AddComponent<OVRSystemPerfMetrics.OVRSystemPerfMetricsTcpServer>();
        }
        OVRSystemPerfMetrics.OVRSystemPerfMetricsTcpServer perfTcpServer = GetComponent<OVRSystemPerfMetrics.OVRSystemPerfMetricsTcpServer>();
        perfTcpServer.listeningPort = profilerTcpPort;
        if (!perfTcpServer.enabled)
        {
            perfTcpServer.enabled = true;
        }
        OVRPlugin.SetDeveloperMode(OVRPlugin.Bool.True);
    }
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
    // Force occlusionMesh on all the time, you can change the value to false if you really need it be off for some reasons,
    // be aware there are performance drops if you don't use occlusionMesh.
    OVRPlugin.occlusionMesh = true;
#endif
    OVRManagerinitialized = true;
}
// Unity lifecycle entry point. On the legacy VR path this initializes immediately;
// on the XR SDK path initialization is deferred to Update() until OVRPlugin reports ready.
private void Awake()
{
#if !USING_XR_SDK
    //For legacy, we should initialize OVRManager in all cases.
    //For now, in XR SDK, only initialize if OVRPlugin is initialized.
    InitOVRManager();
#else
    if (OVRPlugin.initialized)
        InitOVRManager();
#endif
}
  1149. #if UNITY_EDITOR
// Editor-only: flag set after a domain reload so the next Update() can
// re-establish the singleton reference and re-run Initialize().
private static bool _scriptsReloaded;
[UnityEditor.Callbacks.DidReloadScripts]
static void ScriptsReloaded()
{
    _scriptsReloaded = true;
}
  1156. #endif
// Classifies the active XR runtime into loadedXRDevice: Oculus when OVRPlugin is
// initialized, OpenVR when the loaded device/subsystem name matches the OpenVR id,
// Unknown otherwise. The detection source differs between the XR SDK path (display
// subsystem descriptor) and the legacy path (XRSettings).
void SetCurrentXRDevice()
{
#if USING_XR_SDK
    XRDisplaySubsystem currentDisplaySubsystem = GetCurrentDisplaySubsystem();
    XRDisplaySubsystemDescriptor currentDisplaySubsystemDescriptor = GetCurrentDisplaySubsystemDescriptor();
#endif
    if (OVRPlugin.initialized)
    {
        loadedXRDevice = XRDevice.Oculus;
    }
#if USING_XR_SDK
    else if (currentDisplaySubsystem != null && currentDisplaySubsystemDescriptor != null && currentDisplaySubsystem.running)
#else
    else if (Settings.enabled)
#endif
    {
#if USING_XR_SDK
        string loadedXRDeviceName = currentDisplaySubsystemDescriptor.id;
#else
        string loadedXRDeviceName = Settings.loadedDeviceName;
#endif
        if (loadedXRDeviceName == OPENVR_UNITY_NAME_STR)
            loadedXRDevice = XRDevice.OpenVR;
        else
            loadedXRDevice = XRDevice.Unknown;
    }
    else
    {
        loadedXRDevice = XRDevice.Unknown;
    }
}
  1188. #if USING_XR_SDK
  1189. public static XRDisplaySubsystem GetCurrentDisplaySubsystem()
  1190. {
  1191. List<XRDisplaySubsystem> displaySubsystems = new List<XRDisplaySubsystem>();
  1192. SubsystemManager.GetInstances(displaySubsystems);
  1193. //Note: Here we are making the assumption that there will always be one valid display subsystem. If there is not, then submitFrame isn't being called,
  1194. //so for now this is a fine assumption to make.
  1195. if (displaySubsystems.Count > 0)
  1196. return displaySubsystems[0];
  1197. return null;
  1198. }
  1199. public static XRDisplaySubsystemDescriptor GetCurrentDisplaySubsystemDescriptor()
  1200. {
  1201. List<XRDisplaySubsystemDescriptor> displaySubsystemDescriptors = new List<XRDisplaySubsystemDescriptor>();
  1202. SubsystemManager.GetSubsystemDescriptors(displaySubsystemDescriptors);
  1203. if (displaySubsystemDescriptors.Count > 0)
  1204. return displaySubsystemDescriptors[0];
  1205. return null;
  1206. }
  1207. #endif
  1208. void Initialize()
  1209. {
  1210. if (display == null)
  1211. display = new OVRDisplay();
  1212. if (tracker == null)
  1213. tracker = new OVRTracker();
  1214. if (boundary == null)
  1215. boundary = new OVRBoundary();
  1216. reorientHMDOnControllerRecenter = _reorientHMDOnControllerRecenter;
  1217. SetCurrentXRDevice();
  1218. }
  1219. #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
// Ensures the "Main Camera is not set" MRC warning is logged only once until a camera reappears.
private bool suppressDisableMixedRealityBecauseOfNoMainCameraWarning = false;
  1221. #endif
// Per-frame pump: deferred XR SDK initialization, runtime quit/recenter requests,
// tracking-option sync, event dispatch (HMD presence/mount, VR/input focus, audio
// device changes, tracking lost/acquired), adaptive resolution (Windows), and the
// display/input/MRC sub-updates. Event handlers are invoked inside try/catch so a
// throwing subscriber cannot break the rest of the frame.
private void Update()
{
    //Only if we're using the XR SDK do we have to check if OVRManager isn't yet initialized, and init it.
    //If we're on legacy, we know initialization occurred properly in Awake()
#if USING_XR_SDK
    if (!OVRManagerinitialized)
    {
        XRDisplaySubsystem currentDisplaySubsystem = GetCurrentDisplaySubsystem();
        XRDisplaySubsystemDescriptor currentDisplaySubsystemDescriptor = GetCurrentDisplaySubsystemDescriptor();
        if (currentDisplaySubsystem == null || currentDisplaySubsystemDescriptor == null || !OVRPlugin.initialized)
            return;
        //If we're using the XR SDK and the display subsystem is present, and OVRPlugin is initialized, we can init OVRManager
        InitOVRManager();
    }
#endif
#if UNITY_EDITOR
    // Re-wire the singleton after an editor domain reload.
    if (_scriptsReloaded)
    {
        _scriptsReloaded = false;
        instance = this;
        Initialize();
    }
#endif
    SetCurrentXRDevice();
    if (OVRPlugin.shouldQuit)
    {
        Debug.Log("[OVRManager] OVRPlugin.shouldQuit detected");
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        StaticShutdownMixedRealityCapture(instance);
#endif
        Application.Quit();
    }
    if (AllowRecenter && OVRPlugin.shouldRecenter)
    {
        OVRManager.display.RecenterPose();
    }
    // Push the inspector-exposed tracking origin down to the runtime if they diverged.
    if (trackingOriginType != _trackingOriginType)
        trackingOriginType = _trackingOriginType;
    tracker.isEnabled = usePositionTracking;
    OVRPlugin.rotation = useRotationTracking;
    OVRPlugin.useIPDInPositionTracking = useIPDInPositionTracking;
    // Dispatch HMD events.
    isHmdPresent = OVRNodeStateProperties.IsHmdPresent();
    if (useRecommendedMSAALevel && QualitySettings.antiAliasing != display.recommendedMSAALevel)
    {
        Debug.Log("The current MSAA level is " + QualitySettings.antiAliasing +
            ", but the recommended MSAA level is " + display.recommendedMSAALevel +
            ". Switching to the recommended level.");
        QualitySettings.antiAliasing = display.recommendedMSAALevel;
    }
    // Propagate serialized fields to their runtime-backed properties when changed.
    if (monoscopic != _monoscopic)
    {
        monoscopic = _monoscopic;
    }
    if (headPoseRelativeOffsetRotation != _headPoseRelativeOffsetRotation)
    {
        headPoseRelativeOffsetRotation = _headPoseRelativeOffsetRotation;
    }
    if (headPoseRelativeOffsetTranslation != _headPoseRelativeOffsetTranslation)
    {
        headPoseRelativeOffsetTranslation = _headPoseRelativeOffsetTranslation;
    }
    if (_wasHmdPresent && !isHmdPresent)
    {
        try
        {
            Debug.Log("[OVRManager] HMDLost event");
            if (HMDLost != null)
                HMDLost();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    if (!_wasHmdPresent && isHmdPresent)
    {
        try
        {
            Debug.Log("[OVRManager] HMDAcquired event");
            if (HMDAcquired != null)
                HMDAcquired();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    _wasHmdPresent = isHmdPresent;
    // Dispatch HMD mounted events.
    isUserPresent = OVRPlugin.userPresent;
    if (_wasUserPresent && !isUserPresent)
    {
        try
        {
            Debug.Log("[OVRManager] HMDUnmounted event");
            if (HMDUnmounted != null)
                HMDUnmounted();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    if (!_wasUserPresent && isUserPresent)
    {
        try
        {
            Debug.Log("[OVRManager] HMDMounted event");
            if (HMDMounted != null)
                HMDMounted();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    _wasUserPresent = isUserPresent;
    // Dispatch VR Focus events.
    hasVrFocus = OVRPlugin.hasVrFocus;
    if (_hadVrFocus && !hasVrFocus)
    {
        try
        {
            Debug.Log("[OVRManager] VrFocusLost event");
            if (VrFocusLost != null)
                VrFocusLost();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    if (!_hadVrFocus && hasVrFocus)
    {
        try
        {
            Debug.Log("[OVRManager] VrFocusAcquired event");
            if (VrFocusAcquired != null)
                VrFocusAcquired();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    _hadVrFocus = hasVrFocus;
    // Dispatch VR Input events.
    bool hasInputFocus = OVRPlugin.hasInputFocus;
    if (_hadInputFocus && !hasInputFocus)
    {
        try
        {
            Debug.Log("[OVRManager] InputFocusLost event");
            if (InputFocusLost != null)
                InputFocusLost();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    if (!_hadInputFocus && hasInputFocus)
    {
        try
        {
            Debug.Log("[OVRManager] InputFocusAcquired event");
            if (InputFocusAcquired != null)
                InputFocusAcquired();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    _hadInputFocus = hasInputFocus;
    // Changing effective rendering resolution dynamically according to performance
#if (UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN)
    if (enableAdaptiveResolution)
    {
#if UNITY_2017_2_OR_NEWER
        if (Settings.eyeTextureResolutionScale < maxRenderScale)
        {
            // Allocate renderScale to max to avoid re-allocation
            Settings.eyeTextureResolutionScale = maxRenderScale;
        }
        else
        {
            // Adjusting maxRenderScale in case app started with a larger renderScale value
            maxRenderScale = Mathf.Max(maxRenderScale, Settings.eyeTextureResolutionScale);
        }
        minRenderScale = Mathf.Min(minRenderScale, maxRenderScale);
        float minViewportScale = minRenderScale / Settings.eyeTextureResolutionScale;
        float recommendedViewportScale = Mathf.Clamp(Mathf.Sqrt(OVRPlugin.GetAdaptiveGPUPerformanceScale()) * Settings.eyeTextureResolutionScale * Settings.renderViewportScale, 0.5f, 2.0f);
        recommendedViewportScale /= Settings.eyeTextureResolutionScale;
        recommendedViewportScale = Mathf.Clamp(recommendedViewportScale, minViewportScale, 1.0f);
        Settings.renderViewportScale = recommendedViewportScale;
#else
        if (UnityEngine.VR.VRSettings.renderScale < maxRenderScale)
        {
            // Allocate renderScale to max to avoid re-allocation
            UnityEngine.VR.VRSettings.renderScale = maxRenderScale;
        }
        else
        {
            // Adjusting maxRenderScale in case app started with a larger renderScale value
            maxRenderScale = Mathf.Max(maxRenderScale, UnityEngine.VR.VRSettings.renderScale);
        }
        minRenderScale = Mathf.Min(minRenderScale, maxRenderScale);
        float minViewportScale = minRenderScale / UnityEngine.VR.VRSettings.renderScale;
        float recommendedViewportScale = OVRPlugin.GetEyeRecommendedResolutionScale() / UnityEngine.VR.VRSettings.renderScale;
        recommendedViewportScale = Mathf.Clamp(recommendedViewportScale, minViewportScale, 1.0f);
        UnityEngine.VR.VRSettings.renderViewportScale = recommendedViewportScale;
#endif
    }
#endif
    // Dispatch Audio Device events.
    string audioOutId = OVRPlugin.audioOutId;
    if (!prevAudioOutIdIsCached)
    {
        prevAudioOutId = audioOutId;
        prevAudioOutIdIsCached = true;
    }
    else if (audioOutId != prevAudioOutId)
    {
        try
        {
            Debug.Log("[OVRManager] AudioOutChanged event");
            if (AudioOutChanged != null)
                AudioOutChanged();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
        prevAudioOutId = audioOutId;
    }
    string audioInId = OVRPlugin.audioInId;
    if (!prevAudioInIdIsCached)
    {
        prevAudioInId = audioInId;
        prevAudioInIdIsCached = true;
    }
    else if (audioInId != prevAudioInId)
    {
        try
        {
            Debug.Log("[OVRManager] AudioInChanged event");
            if (AudioInChanged != null)
                AudioInChanged();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
        prevAudioInId = audioInId;
    }
    // Dispatch tracking events.
    if (wasPositionTracked && !tracker.isPositionTracked)
    {
        try
        {
            Debug.Log("[OVRManager] TrackingLost event");
            if (TrackingLost != null)
                TrackingLost();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    if (!wasPositionTracked && tracker.isPositionTracked)
    {
        try
        {
            Debug.Log("[OVRManager] TrackingAcquired event");
            if (TrackingAcquired != null)
                TrackingAcquired();
        }
        catch (Exception e)
        {
            Debug.LogError("Caught Exception: " + e);
        }
    }
    wasPositionTracked = tracker.isPositionTracked;
    display.Update();
    OVRInput.Update();
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
    StaticUpdateMixedRealityCapture(this);
#endif
}
// One-shot latch so the "multiple MainCamera" warning in FindMainCamera is only logged once.
private bool multipleMainCameraWarningPresented = false;
// Cache of the last camera FindMainCamera resolved; reused while it stays tagged "MainCamera".
private Camera lastFoundMainCamera = null;
  1515. private Camera FindMainCamera()
  1516. {
  1517. if (lastFoundMainCamera != null && lastFoundMainCamera.CompareTag("MainCamera"))
  1518. {
  1519. return lastFoundMainCamera;
  1520. }
  1521. Camera result = null;
  1522. GameObject[] objects = GameObject.FindGameObjectsWithTag("MainCamera");
  1523. List<Camera> cameras = new List<Camera>(4);
  1524. foreach (GameObject obj in objects)
  1525. {
  1526. Camera camera = obj.GetComponent<Camera>();
  1527. if (camera != null && camera.enabled)
  1528. {
  1529. OVRCameraRig cameraRig = camera.GetComponentInParent<OVRCameraRig>();
  1530. if (cameraRig != null && cameraRig.trackingSpace != null)
  1531. {
  1532. cameras.Add(camera);
  1533. }
  1534. }
  1535. }
  1536. if (cameras.Count == 0)
  1537. {
  1538. result = Camera.main; // pick one of the cameras which tagged as "MainCamera"
  1539. }
  1540. else if (cameras.Count == 1)
  1541. {
  1542. result = cameras[0];
  1543. }
  1544. else
  1545. {
  1546. if (!multipleMainCameraWarningPresented)
  1547. {
  1548. Debug.LogWarning("Multiple MainCamera found. Assume the real MainCamera is the camera with the least depth");
  1549. multipleMainCameraWarningPresented = true;
  1550. }
  1551. // return the camera with least depth
  1552. cameras.Sort((Camera c0, Camera c1) => { return c0.depth < c1.depth ? -1 : (c0.depth > c1.depth ? 1 : 0); });
  1553. result = cameras[0];
  1554. }
  1555. if (result != null)
  1556. {
  1557. Debug.LogFormat("[OVRManager] mainCamera found for MRC: ", result.gameObject.name);
  1558. }
  1559. else
  1560. {
  1561. Debug.Log("[OVRManager] unable to find a vaild camera");
  1562. }
  1563. lastFoundMainCamera = result;
  1564. return result;
  1565. }
  1566. private void OnDisable()
  1567. {
  1568. OVRSystemPerfMetrics.OVRSystemPerfMetricsTcpServer perfTcpServer = GetComponent<OVRSystemPerfMetrics.OVRSystemPerfMetricsTcpServer>();
  1569. if (perfTcpServer != null)
  1570. {
  1571. perfTcpServer.enabled = false;
  1572. }
  1573. }
// Pumps the controller haptics engine once per frame, after all Update() calls.
private void LateUpdate()
{
    OVRHaptics.Process();
}
// Forwards the physics-rate tick to OVRInput so input state stays in sync with FixedUpdate consumers.
private void FixedUpdate()
{
    OVRInput.FixedUpdate();
}
// Clears the initialized flag so a re-created OVRManager can initialize again.
private void OnDestroy()
{
    Debug.Log("[OVRManager] OnDestroy");
    OVRManagerinitialized = false;
}
  1587. private void OnApplicationPause(bool pause)
  1588. {
  1589. if (pause)
  1590. {
  1591. Debug.Log("[OVRManager] OnApplicationPause(true)");
  1592. }
  1593. else
  1594. {
  1595. Debug.Log("[OVRManager] OnApplicationPause(false)");
  1596. }
  1597. }
  1598. private void OnApplicationFocus(bool focus)
  1599. {
  1600. if (focus)
  1601. {
  1602. Debug.Log("[OVRManager] OnApplicationFocus(true)");
  1603. }
  1604. else
  1605. {
  1606. Debug.Log("[OVRManager] OnApplicationFocus(false)");
  1607. }
  1608. }
// Logged so quit ordering can be correlated with the shouldQuit path in Update().
private void OnApplicationQuit()
{
    Debug.Log("[OVRManager] OnApplicationQuit");
}
  1613. #endregion // Unity Messages
/// <summary>
/// Leaves the application/game and returns to the launcher/dashboard.
/// Thin instance wrapper over <see cref="PlatformUIConfirmQuit"/> so it can be
/// bound from UnityEvents/inspector callbacks.
/// </summary>
public void ReturnToLauncher()
{
    // show the platform UI quit prompt
    OVRManager.PlatformUIConfirmQuit();
}
  1622. public static void PlatformUIConfirmQuit()
  1623. {
  1624. if (!isHmdPresent)
  1625. return;
  1626. OVRPlugin.ShowUI(OVRPlugin.PlatformUI.ConfirmQuit);
  1627. }
  1628. #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
// Guards one-time MRC setup in StaticInitializeMixedRealityCapture; cleared on shutdown.
public static bool staticMixedRealityCaptureInitialized = false;
// Previous-frame MRC enable state, used to detect activate/deactivate transitions in StaticUpdateMixedRealityCapture.
public static bool staticPrevEnableMixedRealityCapture = false;
// Persistent snapshot of the manager's MRC settings, re-applied across manager instances.
public static OVRMixedRealityCaptureSettings staticMrcSettings = null;
// One-time setup for Mixed Reality Capture: snapshots the manager's MRC settings and,
// on Android MRC builds, initializes OVRPlugin.Media (audio sample rate, video buffer
// type, activation mode). On subsequent calls it instead re-applies the persisted
// settings to the given manager instance.
public static void StaticInitializeMixedRealityCapture(OVRManager instance)
{
    if (!staticMixedRealityCaptureInitialized)
    {
        staticMrcSettings = ScriptableObject.CreateInstance<OVRMixedRealityCaptureSettings>();
        // NOTE(review): reads from OVRManager.instance while the rest of this method uses
        // the `instance` parameter — presumably both refer to the same object by the time
        // this runs; confirm, or use the parameter for consistency.
        staticMrcSettings.ReadFrom(OVRManager.instance);
#if OVR_ANDROID_MRC
        bool mediaInitialized = OVRPlugin.Media.Initialize();
        Debug.Log(mediaInitialized ? "OVRPlugin.Media initialized" : "OVRPlugin.Media not initialized");
        if (mediaInitialized)
        {
            // Match the MRC audio stream to Unity's audio output sample rate.
            OVRPlugin.Media.SetMrcAudioSampleRate(AudioSettings.outputSampleRate);
            Debug.LogFormat("[MRC] SetMrcAudioSampleRate({0})", AudioSettings.outputSampleRate);
            OVRPlugin.Media.SetMrcInputVideoBufferType(OVRPlugin.Media.InputVideoBufferType.TextureHandle);
            Debug.LogFormat("[MRC] Active InputVideoBufferType:{0}", OVRPlugin.Media.GetMrcInputVideoBufferType());
            if (instance.mrcActivationMode == MrcActivationMode.Automatic)
            {
                OVRPlugin.Media.SetMrcActivationMode(OVRPlugin.Media.MrcActivationMode.Automatic);
                Debug.LogFormat("[MRC] ActivateMode: Automatic");
            }
            else if (instance.mrcActivationMode == MrcActivationMode.Disabled)
            {
                OVRPlugin.Media.SetMrcActivationMode(OVRPlugin.Media.MrcActivationMode.Disabled);
                Debug.LogFormat("[MRC] ActivateMode: Disabled");
            }
        }
#endif
        staticPrevEnableMixedRealityCapture = false;
        staticMixedRealityCaptureInitialized = true;
    }
    else
    {
        staticMrcSettings.ApplyTo(instance);
    }
}
  1667. public static void StaticUpdateMixedRealityCapture(OVRManager instance)
  1668. {
  1669. if (!staticMixedRealityCaptureInitialized)
  1670. {
  1671. return;
  1672. }
  1673. #if OVR_ANDROID_MRC
  1674. instance.enableMixedReality = OVRPlugin.Media.GetInitialized() && OVRPlugin.Media.IsMrcActivated();
  1675. instance.compositionMethod = CompositionMethod.External; // force external composition on Android MRC
  1676. if (OVRPlugin.Media.GetInitialized())
  1677. {
  1678. OVRPlugin.Media.Update();
  1679. }
  1680. #endif
  1681. if (instance.enableMixedReality && !staticPrevEnableMixedRealityCapture)
  1682. {
  1683. OVRPlugin.SendEvent("mixed_reality_capture", "activated");
  1684. Debug.Log("MixedRealityCapture: activate");
  1685. }
  1686. if (!instance.enableMixedReality && staticPrevEnableMixedRealityCapture)
  1687. {
  1688. Debug.Log("MixedRealityCapture: deactivate");
  1689. }
  1690. if (instance.enableMixedReality || staticPrevEnableMixedRealityCapture)
  1691. {
  1692. Camera mainCamera = instance.FindMainCamera();
  1693. if (Camera.main != null)
  1694. {
  1695. instance.suppressDisableMixedRealityBecauseOfNoMainCameraWarning = false;
  1696. if (instance.enableMixedReality)
  1697. {
  1698. OVRMixedReality.Update(instance.gameObject, mainCamera, instance.compositionMethod, instance.useDynamicLighting, instance.capturingCameraDevice, instance.depthQuality);
  1699. }
  1700. if (staticPrevEnableMixedRealityCapture && !instance.enableMixedReality)
  1701. {
  1702. OVRMixedReality.Cleanup();
  1703. }
  1704. staticPrevEnableMixedRealityCapture = instance.enableMixedReality;
  1705. }
  1706. else
  1707. {
  1708. if (!instance.suppressDisableMixedRealityBecauseOfNoMainCameraWarning)
  1709. {
  1710. Debug.LogWarning("Main Camera is not set, Mixed Reality disabled");
  1711. instance.suppressDisableMixedRealityBecauseOfNoMainCameraWarning = true;
  1712. }
  1713. }
  1714. }
  1715. staticMrcSettings.ReadFrom(OVRManager.instance);
  1716. }
// Tears down Mixed Reality Capture: destroys the settings snapshot, cleans up the
// MRC compositor, shuts down OVRPlugin.Media on Android MRC builds, and resets the
// one-time-init flag so a later StaticInitializeMixedRealityCapture can run again.
public static void StaticShutdownMixedRealityCapture(OVRManager instance)
{
    if (staticMixedRealityCaptureInitialized)
    {
        ScriptableObject.Destroy(staticMrcSettings);
        staticMrcSettings = null;
        OVRMixedReality.Cleanup();
#if OVR_ANDROID_MRC
        if (OVRPlugin.Media.GetInitialized())
        {
            OVRPlugin.Media.Shutdown();
        }
#endif
        staticMixedRealityCaptureInitialized = false;
    }
}
  1733. #endif
  1734. }