Assignment for RMIT Mixed Reality in 2020
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

306 lines
13 KiB

/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Utilities SDK License Version 1.31 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/utilities-1.31
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
  13. using UnityEngine;
  14. using System.Collections;
  15. #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
  16. public abstract class OVRCameraComposition : OVRComposition {
  17. protected GameObject cameraFramePlaneObject = null;
  18. protected float cameraFramePlaneDistance;
  19. protected readonly bool hasCameraDeviceOpened = false;
  20. protected readonly bool useDynamicLighting = false;
  21. internal readonly OVRPlugin.CameraDevice cameraDevice = OVRPlugin.CameraDevice.WebCamera0;
  22. private Mesh boundaryMesh = null;
  23. private float boundaryMeshTopY = 0.0f;
  24. private float boundaryMeshBottomY = 0.0f;
  25. private OVRManager.VirtualGreenScreenType boundaryMeshType = OVRManager.VirtualGreenScreenType.Off;
  26. protected OVRCameraComposition(GameObject parentObject, Camera mainCamera, OVRManager.CameraDevice inCameraDevice, bool inUseDynamicLighting, OVRManager.DepthQuality depthQuality)
  27. : base(parentObject, mainCamera)
  28. {
  29. cameraDevice = OVRCompositionUtil.ConvertCameraDevice(inCameraDevice);
  30. Debug.Assert(!hasCameraDeviceOpened);
  31. Debug.Assert(!OVRPlugin.IsCameraDeviceAvailable(cameraDevice) || !OVRPlugin.HasCameraDeviceOpened(cameraDevice));
  32. hasCameraDeviceOpened = false;
  33. useDynamicLighting = inUseDynamicLighting;
  34. bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
  35. if (useDynamicLighting && !cameraSupportsDepth)
  36. {
  37. Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
  38. }
  39. if (OVRPlugin.IsCameraDeviceAvailable(cameraDevice))
  40. {
  41. OVRPlugin.CameraExtrinsics extrinsics;
  42. OVRPlugin.CameraIntrinsics intrinsics;
  43. OVRPlugin.Posef calibrationRawPose;
  44. if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics, out calibrationRawPose))
  45. {
  46. OVRPlugin.SetCameraDevicePreferredColorFrameSize(cameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
  47. }
  48. if (useDynamicLighting)
  49. {
  50. OVRPlugin.SetCameraDeviceDepthSensingMode(cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);
  51. OVRPlugin.CameraDeviceDepthQuality quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
  52. if (depthQuality == OVRManager.DepthQuality.Low)
  53. {
  54. quality = OVRPlugin.CameraDeviceDepthQuality.Low;
  55. }
  56. else if (depthQuality == OVRManager.DepthQuality.Medium)
  57. {
  58. quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
  59. }
  60. else if (depthQuality == OVRManager.DepthQuality.High)
  61. {
  62. quality = OVRPlugin.CameraDeviceDepthQuality.High;
  63. }
  64. else
  65. {
  66. Debug.LogWarning("Unknown depth quality");
  67. }
  68. OVRPlugin.SetCameraDevicePreferredDepthQuality(cameraDevice, quality);
  69. }
  70. Debug.LogFormat("Opening camera device {0}", cameraDevice);
  71. OVRPlugin.OpenCameraDevice(cameraDevice);
  72. if (OVRPlugin.HasCameraDeviceOpened(cameraDevice))
  73. {
  74. Debug.LogFormat("Opened camera device {0}", cameraDevice);
  75. hasCameraDeviceOpened = true;
  76. }
  77. }
  78. }
  79. public override void Cleanup()
  80. {
  81. OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);
  82. if (hasCameraDeviceOpened)
  83. {
  84. Debug.LogFormat("Close camera device {0}", cameraDevice);
  85. OVRPlugin.CloseCameraDevice(cameraDevice);
  86. }
  87. }
  88. public override void RecenterPose()
  89. {
  90. boundaryMesh = null;
  91. }
  92. protected void RefreshCameraFramePlaneObject(GameObject parentObject, Camera mixedRealityCamera, bool useDynamicLighting)
  93. {
  94. OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);
  95. Debug.Assert(cameraFramePlaneObject == null);
  96. cameraFramePlaneObject = GameObject.CreatePrimitive(PrimitiveType.Quad);
  97. cameraFramePlaneObject.name = "OculusMRC_CameraFrame";
  98. cameraFramePlaneObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
  99. cameraFramePlaneObject.GetComponent<Collider>().enabled = false;
  100. cameraFramePlaneObject.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
  101. Material cameraFrameMaterial = new Material(Shader.Find(useDynamicLighting ? "Oculus/OVRMRCameraFrameLit" : "Oculus/OVRMRCameraFrame"));
  102. cameraFramePlaneObject.GetComponent<MeshRenderer>().material = cameraFrameMaterial;
  103. cameraFrameMaterial.SetColor("_Color", Color.white);
  104. cameraFrameMaterial.SetFloat("_Visible", 0.0f);
  105. cameraFramePlaneObject.transform.localScale = new Vector3(4, 4, 4);
  106. cameraFramePlaneObject.SetActive(true);
  107. OVRCameraFrameCompositionManager cameraFrameCompositionManager = mixedRealityCamera.gameObject.AddComponent<OVRCameraFrameCompositionManager>();
  108. cameraFrameCompositionManager.cameraFrameGameObj = cameraFramePlaneObject;
  109. cameraFrameCompositionManager.composition = this;
  110. }
  111. private bool nullcameraRigWarningDisplayed = false;
  112. protected void UpdateCameraFramePlaneObject(Camera mainCamera, Camera mixedRealityCamera, RenderTexture boundaryMeshMaskTexture)
  113. {
  114. bool hasError = false;
  115. Material cameraFrameMaterial = cameraFramePlaneObject.GetComponent<MeshRenderer>().material;
  116. Texture2D colorTexture = Texture2D.blackTexture;
  117. Texture2D depthTexture = Texture2D.whiteTexture;
  118. if (OVRPlugin.IsCameraDeviceColorFrameAvailable(cameraDevice))
  119. {
  120. colorTexture = OVRPlugin.GetCameraDeviceColorFrameTexture(cameraDevice);
  121. }
  122. else
  123. {
  124. Debug.LogWarning("Camera: color frame not ready");
  125. hasError = true;
  126. }
  127. bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
  128. if (useDynamicLighting && cameraSupportsDepth)
  129. {
  130. if (OVRPlugin.IsCameraDeviceDepthFrameAvailable(cameraDevice))
  131. {
  132. depthTexture = OVRPlugin.GetCameraDeviceDepthFrameTexture(cameraDevice);
  133. }
  134. else
  135. {
  136. Debug.LogWarning("Camera: depth frame not ready");
  137. hasError = true;
  138. }
  139. }
  140. if (!hasError)
  141. {
  142. Vector3 offset = mainCamera.transform.position - mixedRealityCamera.transform.position;
  143. float distance = Vector3.Dot(mixedRealityCamera.transform.forward, offset);
  144. cameraFramePlaneDistance = distance;
  145. cameraFramePlaneObject.transform.position = mixedRealityCamera.transform.position + mixedRealityCamera.transform.forward * distance;
  146. cameraFramePlaneObject.transform.rotation = mixedRealityCamera.transform.rotation;
  147. float tanFov = Mathf.Tan(mixedRealityCamera.fieldOfView * Mathf.Deg2Rad * 0.5f);
  148. cameraFramePlaneObject.transform.localScale = new Vector3(distance * mixedRealityCamera.aspect * tanFov * 2.0f, distance * tanFov * 2.0f, 1.0f);
  149. float worldHeight = distance * tanFov * 2.0f;
  150. float worldWidth = worldHeight * mixedRealityCamera.aspect;
  151. float cullingDistance = float.MaxValue;
  152. if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off)
  153. {
  154. RefreshBoundaryMesh(mixedRealityCamera, out cullingDistance);
  155. }
  156. cameraFrameMaterial.mainTexture = colorTexture;
  157. cameraFrameMaterial.SetTexture("_DepthTex", depthTexture);
  158. cameraFrameMaterial.SetVector("_FlipParams", new Vector4((OVRManager.instance.flipCameraFrameHorizontally ? 1.0f : 0.0f), (OVRManager.instance.flipCameraFrameVertically ? 1.0f : 0.0f), 0.0f, 0.0f));
  159. cameraFrameMaterial.SetColor("_ChromaKeyColor", OVRManager.instance.chromaKeyColor);
  160. cameraFrameMaterial.SetFloat("_ChromaKeySimilarity", OVRManager.instance.chromaKeySimilarity);
  161. cameraFrameMaterial.SetFloat("_ChromaKeySmoothRange", OVRManager.instance.chromaKeySmoothRange);
  162. cameraFrameMaterial.SetFloat("_ChromaKeySpillRange", OVRManager.instance.chromaKeySpillRange);
  163. cameraFrameMaterial.SetVector("_TextureDimension", new Vector4(colorTexture.width, colorTexture.height, 1.0f / colorTexture.width, 1.0f / colorTexture.height));
  164. cameraFrameMaterial.SetVector("_TextureWorldSize", new Vector4(worldWidth, worldHeight, 0, 0));
  165. cameraFrameMaterial.SetFloat("_SmoothFactor", OVRManager.instance.dynamicLightingSmoothFactor);
  166. cameraFrameMaterial.SetFloat("_DepthVariationClamp", OVRManager.instance.dynamicLightingDepthVariationClampingValue);
  167. cameraFrameMaterial.SetFloat("_CullingDistance", cullingDistance);
  168. if (OVRManager.instance.virtualGreenScreenType == OVRManager.VirtualGreenScreenType.Off || boundaryMesh == null || boundaryMeshMaskTexture == null)
  169. {
  170. cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
  171. }
  172. else
  173. {
  174. if (cameraRig == null)
  175. {
  176. if (!nullcameraRigWarningDisplayed)
  177. {
  178. Debug.LogWarning("Could not find the OVRCameraRig/CenterEyeAnchor object. Please check if the OVRCameraRig has been setup properly. The virtual green screen has been temporarily disabled");
  179. nullcameraRigWarningDisplayed = true;
  180. }
  181. cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
  182. }
  183. else
  184. {
  185. if (nullcameraRigWarningDisplayed)
  186. {
  187. Debug.Log("OVRCameraRig/CenterEyeAnchor object found. Virtual green screen is activated");
  188. nullcameraRigWarningDisplayed = false;
  189. }
  190. cameraFrameMaterial.SetTexture("_MaskTex", boundaryMeshMaskTexture);
  191. }
  192. }
  193. }
  194. }
  195. protected void RefreshBoundaryMesh(Camera camera, out float cullingDistance)
  196. {
  197. float depthTolerance = OVRManager.instance.virtualGreenScreenApplyDepthCulling ? OVRManager.instance.virtualGreenScreenDepthTolerance : float.PositiveInfinity;
  198. cullingDistance = OVRCompositionUtil.GetMaximumBoundaryDistance(camera, OVRCompositionUtil.ToBoundaryType(OVRManager.instance.virtualGreenScreenType)) + depthTolerance;
  199. if (boundaryMesh == null || boundaryMeshType != OVRManager.instance.virtualGreenScreenType || boundaryMeshTopY != OVRManager.instance.virtualGreenScreenTopY || boundaryMeshBottomY != OVRManager.instance.virtualGreenScreenBottomY)
  200. {
  201. boundaryMeshTopY = OVRManager.instance.virtualGreenScreenTopY;
  202. boundaryMeshBottomY = OVRManager.instance.virtualGreenScreenBottomY;
  203. boundaryMesh = OVRCompositionUtil.BuildBoundaryMesh(OVRCompositionUtil.ToBoundaryType(OVRManager.instance.virtualGreenScreenType), boundaryMeshTopY, boundaryMeshBottomY);
  204. boundaryMeshType = OVRManager.instance.virtualGreenScreenType;
  205. // Creating GameObject for testing purpose only
  206. //GameObject boundaryMeshObject = new GameObject("BoundaryMeshObject");
  207. //boundaryMeshObject.AddComponent<MeshFilter>().mesh = boundaryMesh;
  208. //boundaryMeshObject.AddComponent<MeshRenderer>();
  209. }
  210. }
  211. public class OVRCameraFrameCompositionManager : MonoBehaviour
  212. {
  213. public GameObject cameraFrameGameObj;
  214. public OVRCameraComposition composition;
  215. public RenderTexture boundaryMeshMaskTexture;
  216. private Material cameraFrameMaterial;
  217. private Material whiteMaterial;
  218. void Start()
  219. {
  220. Shader shader = Shader.Find("Oculus/Unlit");
  221. if (!shader)
  222. {
  223. Debug.LogError("Oculus/Unlit shader does not exist");
  224. return;
  225. }
  226. whiteMaterial = new Material(shader);
  227. whiteMaterial.color = Color.white;
  228. }
  229. void OnPreRender()
  230. {
  231. if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off && boundaryMeshMaskTexture != null && composition.boundaryMesh != null)
  232. {
  233. RenderTexture oldRT = RenderTexture.active;
  234. RenderTexture.active = boundaryMeshMaskTexture;
  235. // The camera matrices haven't been setup when OnPreRender() is executed. Load the projection manually
  236. GL.PushMatrix();
  237. GL.LoadProjectionMatrix(GetComponent<Camera>().projectionMatrix);
  238. GL.Clear(false, true, Color.black);
  239. for (int i = 0; i < whiteMaterial.passCount; ++i)
  240. {
  241. if (whiteMaterial.SetPass(i))
  242. {
  243. Graphics.DrawMeshNow(composition.boundaryMesh, composition.cameraRig.ComputeTrackReferenceMatrix());
  244. }
  245. }
  246. GL.PopMatrix();
  247. RenderTexture.active = oldRT;
  248. }
  249. if (cameraFrameGameObj)
  250. {
  251. if (cameraFrameMaterial == null)
  252. cameraFrameMaterial = cameraFrameGameObj.GetComponent<MeshRenderer>().material;
  253. cameraFrameMaterial.SetFloat("_Visible", 1.0f);
  254. }
  255. }
  256. void OnPostRender()
  257. {
  258. if (cameraFrameGameObj)
  259. {
  260. Debug.Assert(cameraFrameMaterial);
  261. cameraFrameMaterial.SetFloat("_Visible", 0.0f);
  262. }
  263. }
  264. }
  265. }
  266. #endif