Assignment for RMIT Mixed Reality in 2020
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.

Licensed under the Oculus Utilities SDK License Version 1.31 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.

You may obtain a copy of the License at

https://developer.oculus.com/licenses/utilities-1.31

Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/

using UnityEngine;
using System;
using System.Collections;
using System.Runtime.InteropServices;

#if UNITY_2017_2_OR_NEWER
using Settings = UnityEngine.XR.XRSettings;
#else
using Settings = UnityEngine.VR.VRSettings;
#endif
/// <summary>
/// Add the OVROverlay script to an object with an optional mesh primitive to have it
/// rendered as a TimeWarp overlay instead of drawing it into the eye buffer.
/// This takes full advantage of the display resolution and avoids double
/// resampling of the texture.
///
/// The following overlay shapes are currently supported:
/// 1. Quad: the most common overlay type; a quad is rendered in TimeWarp space.
/// 2. Cylinder: [Mobile Only][Experimental] displays the overlay as a partial surface of a cylinder.
/// * The cylinder's center is your game object's center.
/// * The cylinder's parameters are encoded in transform.scale:
/// ** [scale.z] is the radius of the cylinder
/// ** [scale.y] is the height of the cylinder
/// ** [scale.x] is the length of the cylinder's arc
/// * Limitations (see the worked example after the OverlayShape enum below):
/// ** Only half of the cylinder can be displayed, so the arc angle has to be smaller than 180 degrees, i.e. [scale.x] / [scale.z] <= PI.
/// ** Your camera has to be inside the cylinder's inscribed sphere; the overlay fades out automatically as the camera approaches the inscribed sphere's surface.
/// ** Translation only works correctly with vrDriver 1.04 or above.
/// 3. Cubemap: displays the overlay as a cube map.
/// 4. OffcenterCubemap: [Mobile Only] displays the overlay as a cube map with a texture coordinate offset.
/// * The actual sampling looks like [color = texture(cubeLayerSampler, normalize(direction) + offset)] instead of [color = texture(cubeLayerSampler, direction)].
/// * The extra center offset is fed from transform.position.
/// * Note: if transform.position's magnitude is greater than 1, some cube map pixels will never be visible,
/// which is usually not what people want. We don't prevent developers from doing this, but we log a warning.
/// 5. Equirect: displays the overlay as a 360-degree equirectangular skybox.
/// </summary>
public class OVROverlay : MonoBehaviour
{
    #region Interface

    /// <summary>
    /// Determines the on-screen appearance of a layer.
    /// </summary>
    public enum OverlayShape
    {
        Quad = OVRPlugin.OverlayShape.Quad,
        Cylinder = OVRPlugin.OverlayShape.Cylinder,
        Cubemap = OVRPlugin.OverlayShape.Cubemap,
        OffcenterCubemap = OVRPlugin.OverlayShape.OffcenterCubemap,
        Equirect = OVRPlugin.OverlayShape.Equirect,
    }
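
    // Worked example (illustrative sketch, not part of the original file): configuring a
    // cylinder overlay section with a 1 m radius, 0.6 m height and a 90-degree arc.
    // Arc length = radius * angle_in_radians = 1.0f * (PI / 2) ~= 1.571f, which satisfies the
    // constraint scale.x / scale.z <= PI (i.e. at most 180 degrees). The variable name
    // "overlay" is an assumed reference to an OVROverlay component.
    //
    //     var overlay = GetComponent<OVROverlay>();
    //     overlay.currentOverlayShape = OVROverlay.OverlayShape.Cylinder;
    //     overlay.transform.localScale = new Vector3(1.571f, 0.6f, 1.0f); // (arc length, height, radius)
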
    /// <summary>
    /// Whether the layer appears behind or in front of other content in the scene.
    /// </summary>
    public enum OverlayType
    {
        None,
        Underlay,
        Overlay,
    };

    /// <summary>
    /// Specifies the overlay's type.
    /// </summary>
    [Tooltip("Specifies the overlay's type.")]
    public OverlayType currentOverlayType = OverlayType.Overlay;

    /// <summary>
    /// If true, the texture's content is copied to the compositor each frame.
    /// </summary>
    [Tooltip("If true, the texture's content is copied to the compositor each frame.")]
    public bool isDynamic = false;

    /// <summary>
    /// If true, the layer will be used to present protected content (e.g. HDCP). The flag is effective only on PC.
    /// </summary>
    [Tooltip("If true, the layer will be used to present protected content (e.g. HDCP). The flag is effective only on PC.")]
    public bool isProtectedContent = false;

    // Source and dest rects
    public Rect srcRectLeft = new Rect();
    public Rect srcRectRight = new Rect();
    public Rect destRectLeft = new Rect();
    public Rect destRectRight = new Rect();

    private OVRPlugin.TextureRectMatrixf textureRectMatrix = OVRPlugin.TextureRectMatrixf.zero;
    public bool overrideTextureRectMatrix = false;
    public bool overridePerLayerColorScaleAndOffset = false;
    public Vector4 colorScale = Vector4.one;
    public Vector4 colorOffset = Vector4.zero;

    // Warning: Developers should only use this supersample setting if they absolutely have the budget and need for it. It is extremely expensive, and will not be relevant for most developers.
    public bool useExpensiveSuperSample = false;

    // Hides the overlay when required: false while visible, true while hidden.
    public bool hidden = false;

    /// <summary>
    /// If true, the layer will be created as an external surface. externalSurfaceObject contains the Surface object. It's effective only on Android.
    /// </summary>
    [Tooltip("If true, the layer will be created as an external surface. externalSurfaceObject contains the Surface object. It's effective only on Android.")]
    public bool isExternalSurface = false;

    /// <summary>
    /// The width which will be used to create the external surface. It's effective only on Android.
    /// </summary>
    [Tooltip("The width which will be used to create the external surface. It's effective only on Android.")]
    public int externalSurfaceWidth = 0;

    /// <summary>
    /// The height which will be used to create the external surface. It's effective only on Android.
    /// </summary>
    [Tooltip("The height which will be used to create the external surface. It's effective only on Android.")]
    public int externalSurfaceHeight = 0;

    /// <summary>
    /// compositionDepth defines the order of the OVROverlays in composition. The overlay/underlay with the smaller compositionDepth is composited in front of the overlay/underlay with the larger compositionDepth.
    /// </summary>
    [Tooltip("compositionDepth defines the order of the OVROverlays in composition. The overlay/underlay with the smaller compositionDepth is composited in front of the overlay/underlay with the larger compositionDepth.")]
    public int compositionDepth = 0;
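
    // Composition-order sketch (illustrative; "hudOverlay" and "backdropOverlay" are assumed
    // OVROverlay references, not part of this file): the layer with the smaller
    // compositionDepth is composited in front.
    //
    //     hudOverlay.compositionDepth = 0;      // drawn in front
    //     backdropOverlay.compositionDepth = 1; // drawn behind hudOverlay
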
    /// <summary>
    /// If true, depth buffer testing is disabled for this layer's compositing, even if the engine has "Depth buffer sharing" enabled on Rift.
    /// </summary>
    [Tooltip("If true, depth buffer testing is disabled for this layer's compositing, even if the engine has \"Shared Depth Buffer\" enabled.")]
    public bool noDepthBufferTesting = false;

    // Format corresponding to the source texture for this layer. sRGB by default, but can be modified if necessary.
    public OVRPlugin.EyeTextureFormat layerTextureFormat = OVRPlugin.EyeTextureFormat.R8G8B8A8_sRGB;

    /// <summary>
    /// Specifies the overlay's shape.
    /// </summary>
    [Tooltip("Specifies the overlay's shape.")]
    public OverlayShape currentOverlayShape = OverlayShape.Quad;
    private OverlayShape prevOverlayShape = OverlayShape.Quad;

    /// <summary>
    /// The left- and right-eye Textures to show in the layer.
    /// \note If you need to change the texture on a per-frame basis, please use OverrideOverlayTextureInfo(..) to avoid caching issues.
    /// </summary>
    [Tooltip("The left- and right-eye Textures to show in the layer.")]
    public Texture[] textures = new Texture[] { null, null };
    protected IntPtr[] texturePtrs = new IntPtr[] { IntPtr.Zero, IntPtr.Zero };

    /// <summary>
    /// The Surface object (Android only).
    /// </summary>
    public System.IntPtr externalSurfaceObject;

    public delegate void ExternalSurfaceObjectCreated();

    /// <summary>
    /// Triggered after externalSurfaceObject is created.
    /// </summary>
    public ExternalSurfaceObjectCreated externalSurfaceObjectCreated;
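
    // External-surface sketch (illustrative): on Android the surface only exists after the
    // layer has been set up, so callers typically wait for this callback before handing
    // externalSurfaceObject to a native video player. "overlay" is an assumed OVROverlay
    // reference and "NativeVideoPlayer.SetSurface" is a hypothetical plugin call, not part
    // of this SDK.
    //
    //     overlay.isExternalSurface = true;
    //     overlay.externalSurfaceWidth = 1920;
    //     overlay.externalSurfaceHeight = 1080;
    //     overlay.externalSurfaceObjectCreated += () =>
    //     {
    //         NativeVideoPlayer.SetSurface(overlay.externalSurfaceObject); // hypothetical
    //     };
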
    /// <summary>
    /// Use this function to set the texture and its native pointer while the app is running.
    /// GetNativeTexturePtr is slow, so the value should be pre-cached.
    /// </summary>
#if UNITY_2017_2_OR_NEWER
    public void OverrideOverlayTextureInfo(Texture srcTexture, IntPtr nativePtr, UnityEngine.XR.XRNode node)
#else
    public void OverrideOverlayTextureInfo(Texture srcTexture, IntPtr nativePtr, UnityEngine.VR.VRNode node)
#endif
    {
#if UNITY_2017_2_OR_NEWER
        int index = (node == UnityEngine.XR.XRNode.RightEye) ? 1 : 0;
#else
        int index = (node == UnityEngine.VR.VRNode.RightEye) ? 1 : 0;
#endif
        if (textures.Length <= index)
            return;
        textures[index] = srcTexture;
        texturePtrs[index] = nativePtr;
        isOverridePending = true;
    }
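
    // Usage sketch (illustrative; "overlay" is an assumed OVROverlay reference and
    // "newTexture" is the caller's texture): cache the native pointer once and reuse it
    // when swapping textures at runtime, instead of calling GetNativeTexturePtr() every frame.
    //
    //     IntPtr cachedPtr = newTexture.GetNativeTexturePtr(); // cache once, e.g. at load time
    //     overlay.OverrideOverlayTextureInfo(newTexture, cachedPtr, UnityEngine.XR.XRNode.LeftEye);
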
    protected bool isOverridePending;

    internal const int maxInstances = 15;
    public static OVROverlay[] instances = new OVROverlay[maxInstances];

    #endregion

    private static Material tex2DMaterial;
    private static Material cubeMaterial;

    private OVRPlugin.LayerLayout layout {
        get {
#if UNITY_ANDROID && !UNITY_EDITOR
            if (textures.Length == 2 && textures[1] != null)
                return OVRPlugin.LayerLayout.Stereo;
#endif
            return OVRPlugin.LayerLayout.Mono;
        }
    }

    private struct LayerTexture {
        public Texture appTexture;
        public IntPtr appTexturePtr;
        public Texture[] swapChain;
        public IntPtr[] swapChainPtr;
    };
    private LayerTexture[] layerTextures;

    private OVRPlugin.LayerDesc layerDesc;
    private int stageCount = -1;
    private int layerIndex = -1; // Controls the composition order based on wake-up time.
    private int layerId = 0; // The layer's internal handle in the compositor.
    private GCHandle layerIdHandle;
    private IntPtr layerIdPtr = IntPtr.Zero;
    private int frameIndex = 0;
    private int prevFrameIndex = -1;
    private Renderer rend;

    private int texturesPerStage { get { return (layout == OVRPlugin.LayerLayout.Stereo) ? 2 : 1; } }

    private bool CreateLayer(int mipLevels, int sampleCount, OVRPlugin.EyeTextureFormat etFormat, int flags, OVRPlugin.Sizei size, OVRPlugin.OverlayShape shape)
    {
        if (!layerIdHandle.IsAllocated || layerIdPtr == IntPtr.Zero)
        {
            layerIdHandle = GCHandle.Alloc(layerId, GCHandleType.Pinned);
            layerIdPtr = layerIdHandle.AddrOfPinnedObject();
        }
        if (layerIndex == -1)
        {
            for (int i = 0; i < maxInstances; ++i)
            {
                if (instances[i] == null || instances[i] == this)
                {
                    layerIndex = i;
                    instances[i] = this;
                    break;
                }
            }
        }
        bool needsSetup = (
            isOverridePending ||
            layerDesc.MipLevels != mipLevels ||
            layerDesc.SampleCount != sampleCount ||
            layerDesc.Format != etFormat ||
            layerDesc.Layout != layout ||
            layerDesc.LayerFlags != flags ||
            !layerDesc.TextureSize.Equals(size) ||
            layerDesc.Shape != shape);
        if (!needsSetup)
            return false;
        OVRPlugin.LayerDesc desc = OVRPlugin.CalculateLayerDesc(shape, layout, size, mipLevels, sampleCount, etFormat, flags);
        OVRPlugin.EnqueueSetupLayer(desc, compositionDepth, layerIdPtr);
        layerId = (int)layerIdHandle.Target;
        if (layerId > 0)
        {
            layerDesc = desc;
            if (isExternalSurface)
            {
                stageCount = 1;
            }
            else
            {
                stageCount = OVRPlugin.GetLayerTextureStageCount(layerId);
            }
        }
        isOverridePending = false;
        return true;
    }

    private bool CreateLayerTextures(bool useMipmaps, OVRPlugin.Sizei size, bool isHdr)
    {
        if (isExternalSurface)
        {
            if (externalSurfaceObject == System.IntPtr.Zero)
            {
                externalSurfaceObject = OVRPlugin.GetLayerAndroidSurfaceObject(layerId);
                if (externalSurfaceObject != System.IntPtr.Zero)
                {
                    Debug.LogFormat("GetLayerAndroidSurfaceObject returns {0}", externalSurfaceObject);
                    if (externalSurfaceObjectCreated != null)
                    {
                        externalSurfaceObjectCreated();
                    }
                }
            }
            return false;
        }
        bool needsCopy = false;
        if (stageCount <= 0)
            return false;
        // For newer SDKs, blit directly to the surface that will be used in compositing.
        if (layerTextures == null)
            layerTextures = new LayerTexture[texturesPerStage];
        for (int eyeId = 0; eyeId < texturesPerStage; ++eyeId)
        {
            if (layerTextures[eyeId].swapChain == null)
                layerTextures[eyeId].swapChain = new Texture[stageCount];
            if (layerTextures[eyeId].swapChainPtr == null)
                layerTextures[eyeId].swapChainPtr = new IntPtr[stageCount];
            for (int stage = 0; stage < stageCount; ++stage)
            {
                Texture sc = layerTextures[eyeId].swapChain[stage];
                IntPtr scPtr = layerTextures[eyeId].swapChainPtr[stage];
                if (sc != null && scPtr != IntPtr.Zero && size.w == sc.width && size.h == sc.height)
                    continue;
                if (scPtr == IntPtr.Zero)
                    scPtr = OVRPlugin.GetLayerTexture(layerId, stage, (OVRPlugin.Eye)eyeId);
                if (scPtr == IntPtr.Zero)
                    continue;
                var txFormat = (isHdr) ? TextureFormat.RGBAHalf : TextureFormat.RGBA32;
                if (currentOverlayShape != OverlayShape.Cubemap && currentOverlayShape != OverlayShape.OffcenterCubemap)
                    sc = Texture2D.CreateExternalTexture(size.w, size.h, txFormat, useMipmaps, true, scPtr);
#if UNITY_2017_1_OR_NEWER
                else
                    sc = Cubemap.CreateExternalTexture(size.w, txFormat, useMipmaps, scPtr);
#endif
                layerTextures[eyeId].swapChain[stage] = sc;
                layerTextures[eyeId].swapChainPtr[stage] = scPtr;
                needsCopy = true;
            }
        }
        return needsCopy;
    }

    private void DestroyLayerTextures()
    {
        if (isExternalSurface)
        {
            return;
        }
        for (int eyeId = 0; layerTextures != null && eyeId < texturesPerStage; ++eyeId)
        {
            if (layerTextures[eyeId].swapChain != null)
            {
                for (int stage = 0; stage < stageCount; ++stage)
                    DestroyImmediate(layerTextures[eyeId].swapChain[stage]);
            }
        }
        layerTextures = null;
    }

    private void DestroyLayer()
    {
        if (layerIndex != -1)
        {
            // Turn off the overlay if it was on.
            OVRPlugin.EnqueueSubmitLayer(true, false, false, IntPtr.Zero, IntPtr.Zero, -1, 0, OVRPose.identity.ToPosef_Legacy(), Vector3.one.ToVector3f(), layerIndex, (OVRPlugin.OverlayShape)prevOverlayShape);
            instances[layerIndex] = null;
            layerIndex = -1;
        }
        if (layerIdPtr != IntPtr.Zero)
        {
            OVRPlugin.EnqueueDestroyLayer(layerIdPtr);
            layerIdPtr = IntPtr.Zero;
            layerIdHandle.Free();
            layerId = 0;
        }
        layerDesc = new OVRPlugin.LayerDesc();
        frameIndex = 0;
        prevFrameIndex = -1;
    }
    /// <summary>
    /// Sets the source and dest rects for both eyes. The source rect determines what portion of the
    /// source texture is used, and the dest rect determines what portion of the layer it is rendered into.
    /// </summary>
    public void SetSrcDestRects(Rect srcLeft, Rect srcRight, Rect destLeft, Rect destRight)
    {
        srcRectLeft = srcLeft;
        srcRectRight = srcRight;
        destRectLeft = destLeft;
        destRectRight = destRight;
    }
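
    // Usage sketch (illustrative; "overlay" is an assumed OVROverlay reference): mapping a
    // side-by-side stereo video onto the layer. Each eye samples its half of the source
    // texture and fills the whole layer. Note that overrideTextureRectMatrix must be true
    // for these rects to be applied at submit time.
    //
    //     overlay.overrideTextureRectMatrix = true;
    //     overlay.SetSrcDestRects(
    //         new Rect(0.0f, 0.0f, 0.5f, 1.0f),  // left eye samples the left half
    //         new Rect(0.5f, 0.0f, 0.5f, 1.0f),  // right eye samples the right half
    //         new Rect(0.0f, 0.0f, 1.0f, 1.0f),  // both eyes fill the entire layer
    //         new Rect(0.0f, 0.0f, 1.0f, 1.0f));
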
    public void UpdateTextureRectMatrix()
    {
        Rect srcRectLeftConverted = new Rect(srcRectLeft.x, 1 - srcRectLeft.y - srcRectLeft.height, srcRectLeft.width, srcRectLeft.height);
        Rect srcRectRightConverted = new Rect(srcRectRight.x, 1 - srcRectRight.y - srcRectRight.height, srcRectRight.width, srcRectRight.height);
        textureRectMatrix.leftRect = srcRectLeftConverted;
        textureRectMatrix.rightRect = srcRectRightConverted;
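
        // The scale/bias packing below encodes the mapping uv_src = uv_dest * scale + bias,
        // with scale = srcSize / destSize and bias = srcOrigin - destOrigin * scale, so that
        // the corners of the dest rect land exactly on the corners of the (vertically
        // flipped) source rect.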
        float leftWidthFactor = srcRectLeftConverted.width / destRectLeft.width;
        float leftHeightFactor = srcRectLeftConverted.height / destRectLeft.height;
        textureRectMatrix.leftScaleBias = new Vector4(leftWidthFactor, leftHeightFactor, srcRectLeftConverted.x - destRectLeft.x * leftWidthFactor, srcRectLeftConverted.y - destRectLeft.y * leftHeightFactor);
        float rightWidthFactor = srcRectRightConverted.width / destRectRight.width;
        float rightHeightFactor = srcRectRightConverted.height / destRectRight.height;
        textureRectMatrix.rightScaleBias = new Vector4(rightWidthFactor, rightHeightFactor, srcRectRightConverted.x - destRectRight.x * rightWidthFactor, srcRectRightConverted.y - destRectRight.y * rightHeightFactor);
    }
    public void SetPerLayerColorScaleAndOffset(Vector4 scale, Vector4 offset)
    {
        colorScale = scale;
        colorOffset = offset;
    }
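
    // Usage sketch (illustrative; "overlay" is an assumed OVROverlay reference): fading a
    // layer to 25% opacity. Since the PC compositor uses premultiplied alpha (see
    // PopulateLayer), scaling RGB together with A gives a uniform fade; note that
    // overridePerLayerColorScaleAndOffset must be true for the values to take effect.
    //
    //     float alpha = 0.25f;
    //     overlay.overridePerLayerColorScaleAndOffset = true;
    //     overlay.SetPerLayerColorScaleAndOffset(new Vector4(alpha, alpha, alpha, alpha), Vector4.zero);
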
    private bool LatchLayerTextures()
    {
        if (isExternalSurface)
        {
            return true;
        }
        for (int i = 0; i < texturesPerStage; ++i)
        {
            if (textures[i] != layerTextures[i].appTexture || layerTextures[i].appTexturePtr == IntPtr.Zero)
            {
                if (textures[i] != null)
                {
#if UNITY_EDITOR
                    var assetPath = UnityEditor.AssetDatabase.GetAssetPath(textures[i]);
                    var importer = (UnityEditor.TextureImporter)UnityEditor.TextureImporter.GetAtPath(assetPath);
                    if (importer && importer.textureType != UnityEditor.TextureImporterType.Default)
                    {
                        Debug.LogError("Need Default Texture Type for overlay");
                        return false;
                    }
#endif
                    var rt = textures[i] as RenderTexture;
                    if (rt && !rt.IsCreated())
                        rt.Create();
                    layerTextures[i].appTexturePtr = (texturePtrs[i] != IntPtr.Zero) ? texturePtrs[i] : textures[i].GetNativeTexturePtr();
                    if (layerTextures[i].appTexturePtr != IntPtr.Zero)
                        layerTextures[i].appTexture = textures[i];
                }
            }
            if (currentOverlayShape == OverlayShape.Cubemap)
            {
                if (textures[i] as Cubemap == null)
                {
                    Debug.LogError("Need Cubemap texture for cube map overlay");
                    return false;
                }
            }
        }
#if !UNITY_ANDROID || UNITY_EDITOR
        if (currentOverlayShape == OverlayShape.OffcenterCubemap)
        {
            Debug.LogWarning("Overlay shape " + currentOverlayShape + " is not supported on current platform");
            return false;
        }
#endif
        if (layerTextures[0].appTexture == null || layerTextures[0].appTexturePtr == IntPtr.Zero)
            return false;
        return true;
    }
    private OVRPlugin.LayerDesc GetCurrentLayerDesc()
    {
        OVRPlugin.Sizei textureSize = new OVRPlugin.Sizei() { w = 0, h = 0 };
        if (isExternalSurface)
        {
            textureSize.w = externalSurfaceWidth;
            textureSize.h = externalSurfaceHeight;
        }
        else
        {
            if (textures[0] == null)
            {
                Debug.LogWarning("textures[0] hasn't been set");
            }
            textureSize.w = textures[0] ? textures[0].width : 0;
            textureSize.h = textures[0] ? textures[0].height : 0;
        }
        OVRPlugin.LayerDesc newDesc = new OVRPlugin.LayerDesc() {
            Format = layerTextureFormat,
            LayerFlags = isExternalSurface ? 0 : (int)OVRPlugin.LayerFlags.TextureOriginAtBottomLeft,
            Layout = layout,
            MipLevels = 1,
            SampleCount = 1,
            Shape = (OVRPlugin.OverlayShape)currentOverlayShape,
            TextureSize = textureSize
        };
        var tex2D = textures[0] as Texture2D;
        if (tex2D != null)
        {
            if (tex2D.format == TextureFormat.RGBAHalf || tex2D.format == TextureFormat.RGBAFloat)
                newDesc.Format = OVRPlugin.EyeTextureFormat.R16G16B16A16_FP;
            newDesc.MipLevels = tex2D.mipmapCount;
        }
        var texCube = textures[0] as Cubemap;
        if (texCube != null)
        {
            if (texCube.format == TextureFormat.RGBAHalf || texCube.format == TextureFormat.RGBAFloat)
                newDesc.Format = OVRPlugin.EyeTextureFormat.R16G16B16A16_FP;
            newDesc.MipLevels = texCube.mipmapCount;
        }
        var rt = textures[0] as RenderTexture;
        if (rt != null)
        {
            newDesc.SampleCount = rt.antiAliasing;
            if (rt.format == RenderTextureFormat.ARGBHalf || rt.format == RenderTextureFormat.ARGBFloat || rt.format == RenderTextureFormat.RGB111110Float)
                newDesc.Format = OVRPlugin.EyeTextureFormat.R16G16B16A16_FP;
        }
        if (isProtectedContent)
        {
            newDesc.LayerFlags |= (int)OVRPlugin.LayerFlags.ProtectedContent;
        }
        if (isExternalSurface)
        {
            newDesc.LayerFlags |= (int)OVRPlugin.LayerFlags.AndroidSurfaceSwapChain;
        }
        return newDesc;
    }
    private bool PopulateLayer(int mipLevels, bool isHdr, OVRPlugin.Sizei size, int sampleCount, int stage)
    {
        if (isExternalSurface)
        {
            return true;
        }
        bool ret = false;
        RenderTextureFormat rtFormat = (isHdr) ? RenderTextureFormat.ARGBHalf : RenderTextureFormat.ARGB32;
        for (int eyeId = 0; eyeId < texturesPerStage; ++eyeId)
        {
            Texture et = layerTextures[eyeId].swapChain[stage];
            if (et == null)
                continue;
            for (int mip = 0; mip < mipLevels; ++mip)
            {
                int width = size.w >> mip;
                if (width < 1) width = 1;
                int height = size.h >> mip;
                if (height < 1) height = 1;
#if UNITY_2017_1_1 || UNITY_2017_2_OR_NEWER
                RenderTextureDescriptor descriptor = new RenderTextureDescriptor(width, height, rtFormat, 0);
                descriptor.msaaSamples = sampleCount;
                descriptor.useMipMap = true;
                descriptor.autoGenerateMips = false;
                descriptor.sRGB = false;
                var tempRTDst = RenderTexture.GetTemporary(descriptor);
#else
                var tempRTDst = RenderTexture.GetTemporary(width, height, 0, rtFormat, RenderTextureReadWrite.Linear, sampleCount);
#endif
                if (!tempRTDst.IsCreated())
                    tempRTDst.Create();
                tempRTDst.DiscardContents();
                bool dataIsLinear = isHdr || (QualitySettings.activeColorSpace == ColorSpace.Linear);
#if !UNITY_2017_1_OR_NEWER
                var rt = textures[eyeId] as RenderTexture;
                dataIsLinear |= rt != null && rt.sRGB; //HACK: Unity 5.6 and earlier convert to linear on read from an sRGB RenderTexture.
#endif
#if UNITY_ANDROID && !UNITY_EDITOR
                dataIsLinear = true; //HACK: Graphics.CopyTexture causes a linear->srgb conversion on target write with D3D but not GLES.
#endif
                if (currentOverlayShape != OverlayShape.Cubemap && currentOverlayShape != OverlayShape.OffcenterCubemap)
                {
                    tex2DMaterial.SetInt("_linearToSrgb", (!isHdr && dataIsLinear) ? 1 : 0);
                    // Resolve, decompress, swizzle, etc. are not handled by a simple CopyTexture.
#if !UNITY_ANDROID || UNITY_EDITOR
                    // The PC compositor uses premultiplied alpha, so multiply it here.
                    tex2DMaterial.SetInt("_premultiply", 1);
#endif
                    Graphics.Blit(textures[eyeId], tempRTDst, tex2DMaterial);
                    Graphics.CopyTexture(tempRTDst, 0, 0, et, 0, mip);
                }
#if UNITY_2017_1_OR_NEWER
                else // Cubemap
                {
                    for (int face = 0; face < 6; ++face)
                    {
                        cubeMaterial.SetInt("_linearToSrgb", (!isHdr && dataIsLinear) ? 1 : 0);
#if !UNITY_ANDROID || UNITY_EDITOR
                        // The PC compositor uses premultiplied alpha, so multiply it here.
                        cubeMaterial.SetInt("_premultiply", 1);
#endif
                        cubeMaterial.SetInt("_face", face);
                        // Resolve, decompress, swizzle, etc. are not handled by a simple CopyTexture.
                        Graphics.Blit(textures[eyeId], tempRTDst, cubeMaterial);
                        Graphics.CopyTexture(tempRTDst, 0, 0, et, face, mip);
                    }
                }
#endif
                RenderTexture.ReleaseTemporary(tempRTDst);
                ret = true;
            }
        }
        return ret;
    }
    private bool SubmitLayer(bool overlay, bool headLocked, bool noDepthBufferTesting, OVRPose pose, Vector3 scale, int frameIndex)
    {
        int rightEyeIndex = (texturesPerStage >= 2) ? 1 : 0;
        if (overrideTextureRectMatrix)
        {
            UpdateTextureRectMatrix();
        }
        bool isOverlayVisible = OVRPlugin.EnqueueSubmitLayer(overlay, headLocked, noDepthBufferTesting,
            isExternalSurface ? System.IntPtr.Zero : layerTextures[0].appTexturePtr,
            isExternalSurface ? System.IntPtr.Zero : layerTextures[rightEyeIndex].appTexturePtr,
            layerId, frameIndex, pose.flipZ().ToPosef_Legacy(), scale.ToVector3f(), layerIndex, (OVRPlugin.OverlayShape)currentOverlayShape,
            overrideTextureRectMatrix, textureRectMatrix, overridePerLayerColorScaleAndOffset, colorScale, colorOffset, useExpensiveSuperSample,
            hidden);
        prevOverlayShape = currentOverlayShape;
        return isOverlayVisible;
    }
    #region Unity Messages

    void Awake()
    {
        Debug.Log("Overlay Awake");
        if (tex2DMaterial == null)
            tex2DMaterial = new Material(Shader.Find("Oculus/Texture2D Blit"));
        if (cubeMaterial == null)
            cubeMaterial = new Material(Shader.Find("Oculus/Cubemap Blit"));
        rend = GetComponent<Renderer>();
        if (textures.Length == 0)
            textures = new Texture[] { null };
        // Backward compatibility
        if (rend != null && textures[0] == null)
            textures[0] = rend.material.mainTexture;
    }

    static public string OpenVROverlayKey { get { return "unity:" + Application.companyName + "." + Application.productName; } }
    private ulong OpenVROverlayHandle = OVR.OpenVR.OpenVR.k_ulOverlayHandleInvalid;

    void OnEnable()
    {
        if (OVRManager.OVRManagerinitialized)
            InitOVROverlay();
    }

    void InitOVROverlay()
    {
        if (!OVRManager.isHmdPresent)
        {
            enabled = false;
            return;
        }
        constructedOverlayXRDevice = OVRManager.XRDevice.Unknown;
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
        {
            OVR.OpenVR.CVROverlay overlay = OVR.OpenVR.OpenVR.Overlay;
            if (overlay != null)
            {
                OVR.OpenVR.EVROverlayError error = overlay.CreateOverlay(OpenVROverlayKey + transform.name, gameObject.name, ref OpenVROverlayHandle);
                if (error != OVR.OpenVR.EVROverlayError.None)
                {
                    enabled = false;
                    return;
                }
            }
            else
            {
                enabled = false;
                return;
            }
        }
        constructedOverlayXRDevice = OVRManager.loadedXRDevice;
        xrDeviceConstructed = true;
    }

    void OnDisable()
    {
        if ((gameObject.hideFlags & HideFlags.DontSaveInBuild) != 0)
            return;
        if (!OVRManager.OVRManagerinitialized)
            return;
        if (OVRManager.loadedXRDevice != constructedOverlayXRDevice)
            return;
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
        {
            DestroyLayerTextures();
            DestroyLayer();
        }
        else if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
        {
            if (OpenVROverlayHandle != OVR.OpenVR.OpenVR.k_ulOverlayHandleInvalid)
            {
                OVR.OpenVR.CVROverlay overlay = OVR.OpenVR.OpenVR.Overlay;
                if (overlay != null)
                {
                    overlay.DestroyOverlay(OpenVROverlayHandle);
                }
                OpenVROverlayHandle = OVR.OpenVR.OpenVR.k_ulOverlayHandleInvalid;
            }
        }
        constructedOverlayXRDevice = OVRManager.XRDevice.Unknown;
        xrDeviceConstructed = false;
    }

    void OnDestroy()
    {
        DestroyLayerTextures();
        DestroyLayer();
    }
    bool ComputeSubmit(ref OVRPose pose, ref Vector3 scale, ref bool overlay, ref bool headLocked)
    {
        Camera headCamera = Camera.main;
        overlay = (currentOverlayType == OverlayType.Overlay);
        headLocked = false;
        for (var t = transform; t != null && !headLocked; t = t.parent)
            headLocked |= (t == headCamera.transform);
        pose = (headLocked) ? transform.ToHeadSpacePose(headCamera) : transform.ToTrackingSpacePose(headCamera);
        scale = transform.lossyScale;
        for (int i = 0; i < 3; ++i)
            scale[i] /= headCamera.transform.lossyScale[i];
        if (currentOverlayShape == OverlayShape.Cubemap)
        {
#if UNITY_ANDROID && !UNITY_EDITOR
            //HACK: VRAPI cubemaps are assumed to be yawed 180 degrees relative to LibOVR.
            pose.orientation = pose.orientation * Quaternion.AngleAxis(180, Vector3.up);
#endif
            pose.position = headCamera.transform.position;
        }
        // Pack the offsetCenter directly into pose.position for OffcenterCubemap.
        if (currentOverlayShape == OverlayShape.OffcenterCubemap)
        {
            pose.position = transform.position;
            if (pose.position.magnitude > 1.0f)
            {
                Debug.LogWarning("Your cube map center offset's magnitude is greater than 1, which will cause some cube map pixels to never be visible.");
                return false;
            }
        }
        // Cylinder overlay sanity checking
        if (currentOverlayShape == OverlayShape.Cylinder)
        {
            float arcAngle = scale.x / scale.z / (float)Math.PI * 180.0f;
            if (arcAngle > 180.0f)
            {
                Debug.LogWarning("Cylinder overlay's arc angle has to be below 180 degrees; the current arc angle is " + arcAngle + " degrees.");
                return false;
            }
        }
        return true;
    }
    void OpenVROverlayUpdate(Vector3 scale, OVRPose pose)
    {
        OVR.OpenVR.CVROverlay overlayRef = OVR.OpenVR.OpenVR.Overlay;
        if (overlayRef == null)
            return;
        Texture overlayTex = textures[0];
        if (overlayTex != null)
        {
            OVR.OpenVR.EVROverlayError error = overlayRef.ShowOverlay(OpenVROverlayHandle);
            if (error == OVR.OpenVR.EVROverlayError.InvalidHandle || error == OVR.OpenVR.EVROverlayError.UnknownOverlay)
            {
                if (overlayRef.FindOverlay(OpenVROverlayKey + transform.name, ref OpenVROverlayHandle) != OVR.OpenVR.EVROverlayError.None)
                    return;
            }
            OVR.OpenVR.Texture_t tex = new OVR.OpenVR.Texture_t();
            tex.handle = overlayTex.GetNativeTexturePtr();
            tex.eType = SystemInfo.graphicsDeviceVersion.StartsWith("OpenGL") ? OVR.OpenVR.ETextureType.OpenGL : OVR.OpenVR.ETextureType.DirectX;
            tex.eColorSpace = OVR.OpenVR.EColorSpace.Auto;
            overlayRef.SetOverlayTexture(OpenVROverlayHandle, ref tex);
            OVR.OpenVR.VRTextureBounds_t textureBounds = new OVR.OpenVR.VRTextureBounds_t();
            textureBounds.uMin = (0 + OpenVRUVOffsetAndScale.x) * OpenVRUVOffsetAndScale.z;
            textureBounds.vMin = (1 + OpenVRUVOffsetAndScale.y) * OpenVRUVOffsetAndScale.w;
            textureBounds.uMax = (1 + OpenVRUVOffsetAndScale.x) * OpenVRUVOffsetAndScale.z;
            textureBounds.vMax = (0 + OpenVRUVOffsetAndScale.y) * OpenVRUVOffsetAndScale.w;
            overlayRef.SetOverlayTextureBounds(OpenVROverlayHandle, ref textureBounds);
            OVR.OpenVR.HmdVector2_t vecMouseScale = new OVR.OpenVR.HmdVector2_t();
            vecMouseScale.v0 = OpenVRMouseScale.x;
            vecMouseScale.v1 = OpenVRMouseScale.y;
            overlayRef.SetOverlayMouseScale(OpenVROverlayHandle, ref vecMouseScale);
            overlayRef.SetOverlayWidthInMeters(OpenVROverlayHandle, scale.x);
            Matrix4x4 mat44 = Matrix4x4.TRS(pose.position, pose.orientation, Vector3.one);
            OVR.OpenVR.HmdMatrix34_t pose34 = mat44.ConvertToHMDMatrix34();
            overlayRef.SetOverlayTransformAbsolute(OpenVROverlayHandle, OVR.OpenVR.ETrackingUniverseOrigin.TrackingUniverseStanding, ref pose34);
        }
    }

    private Vector4 OpenVRUVOffsetAndScale = new Vector4(0, 0, 1.0f, 1.0f);
    private Vector2 OpenVRMouseScale = new Vector2(1, 1);
    private OVRManager.XRDevice constructedOverlayXRDevice;
    private bool xrDeviceConstructed = false;
    void LateUpdate()
    {
        if (!OVRManager.OVRManagerinitialized)
            return;
        if (!xrDeviceConstructed)
        {
            InitOVROverlay();
        }
        if (OVRManager.loadedXRDevice != constructedOverlayXRDevice)
        {
            Debug.LogError("Warning: the XR device was switched at runtime with overlays still enabled. When switching XR devices, all overlays constructed with the previous XR device must first be disabled.");
            return;
        }
        // The overlay must be specified every eye frame, because it is positioned relative to the
        // current head location. If frames are dropped, it will be time warped appropriately,
        // just like the eye buffers.
        if (currentOverlayType == OverlayType.None || ((textures.Length < texturesPerStage || textures[0] == null) && !isExternalSurface))
            return;
        OVRPose pose = OVRPose.identity;
        Vector3 scale = Vector3.one;
        bool overlay = false;
        bool headLocked = false;
        if (!ComputeSubmit(ref pose, ref scale, ref overlay, ref headLocked))
            return;
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
        {
            if (currentOverlayShape == OverlayShape.Quad)
                OpenVROverlayUpdate(scale, pose);
            // No more overlay processing is required if we're on OpenVR.
            return;
        }
        OVRPlugin.LayerDesc newDesc = GetCurrentLayerDesc();
        bool isHdr = (newDesc.Format == OVRPlugin.EyeTextureFormat.R16G16B16A16_FP);
        // If the layer and textures are created but the sizes differ, force re-creating them.
        if (!layerDesc.TextureSize.Equals(newDesc.TextureSize) && layerId > 0)
        {
            DestroyLayerTextures();
            DestroyLayer();
        }
        bool createdLayer = CreateLayer(newDesc.MipLevels, newDesc.SampleCount, newDesc.Format, newDesc.LayerFlags, newDesc.TextureSize, newDesc.Shape);
        if (layerIndex == -1 || layerId <= 0)
            return;
        bool useMipmaps = (newDesc.MipLevels > 1);
        createdLayer |= CreateLayerTextures(useMipmaps, newDesc.TextureSize, isHdr);
        if (!isExternalSurface && (layerTextures[0].appTexture as RenderTexture != null))
            isDynamic = true;
        if (!LatchLayerTextures())
            return;
        // Don't populate the same frame image twice.
        if (frameIndex > prevFrameIndex)
        {
            int stage = frameIndex % stageCount;
            if (!PopulateLayer(newDesc.MipLevels, isHdr, newDesc.TextureSize, newDesc.SampleCount, stage))
                return;
        }
        bool isOverlayVisible = SubmitLayer(overlay, headLocked, noDepthBufferTesting, pose, scale, frameIndex);
        prevFrameIndex = frameIndex;
        if (isDynamic)
            ++frameIndex;
        // Backward compatibility: show the regular renderer if the overlay isn't visible.
        if (rend)
            rend.enabled = !isOverlayVisible;
    }

    #endregion
}