Assignment for RMIT Mixed Reality in 2020
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

430 lines
14 KiB

  1. /************************************************************************************
  2. Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
  3. Licensed under the Oculus Utilities SDK License Version 1.31 (the "License"); you may not use
  4. the Utilities SDK except in compliance with the License, which is provided at the time of installation
  5. or download, or which otherwise accompanies this software in either electronic or hard copy form.
  6. You may obtain a copy of the License at
  7. https://developer.oculus.com/licenses/utilities-1.31
  8. Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
  9. under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
  10. ANY KIND, either express or implied. See the License for the specific language governing
  11. permissions and limitations under the License.
  12. ************************************************************************************/
  13. #if UNITY_ANDROID && !UNITY_EDITOR
  14. #define OVR_ANDROID_MRC
  15. #endif
  16. using UnityEngine;
  17. using System.Collections.Generic;
  18. using System.Threading;
  19. #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
  20. public class OVRExternalComposition : OVRComposition
  21. {
  22. private GameObject previousMainCameraObject = null;
  23. public GameObject foregroundCameraGameObject = null;
  24. public Camera foregroundCamera = null;
  25. public GameObject backgroundCameraGameObject = null;
  26. public Camera backgroundCamera = null;
  27. #if OVR_ANDROID_MRC
  28. public AudioListener audioListener;
  29. public OVRMRAudioFilter audioFilter;
  30. public RenderTexture[] mrcRenderTextureArray = new RenderTexture[2];
  31. public int frameIndex;
  32. public int lastMrcEncodeFrameSyncId;
  33. #endif
  34. public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.External; }
  35. public OVRExternalComposition(GameObject parentObject, Camera mainCamera)
  36. : base(parentObject, mainCamera)
  37. {
  38. #if OVR_ANDROID_MRC
  39. int frameWidth;
  40. int frameHeight;
  41. OVRPlugin.Media.GetMrcFrameSize(out frameWidth, out frameHeight);
  42. Debug.LogFormat("[OVRExternalComposition] Create render texture {0}, {1}", frameWidth, frameHeight);
  43. for (int i=0; i<2; ++i)
  44. {
  45. mrcRenderTextureArray[i] = new RenderTexture(frameWidth, frameHeight, 24, RenderTextureFormat.ARGB32);
  46. mrcRenderTextureArray[i].Create();
  47. }
  48. frameIndex = 0;
  49. lastMrcEncodeFrameSyncId = -1;
  50. #endif
  51. RefreshCameraObjects(parentObject, mainCamera);
  52. }
  53. private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera)
  54. {
  55. if (mainCamera.gameObject != previousMainCameraObject)
  56. {
  57. Debug.LogFormat("[OVRExternalComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);
  58. OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
  59. backgroundCamera = null;
  60. OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
  61. foregroundCamera = null;
  62. RefreshCameraRig(parentObject, mainCamera);
  63. Debug.Assert(backgroundCameraGameObject == null);
  64. backgroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
  65. backgroundCameraGameObject.name = "OculusMRC_BackgroundCamera";
  66. backgroundCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
  67. if (backgroundCameraGameObject.GetComponent<AudioListener>())
  68. {
  69. Object.Destroy(backgroundCameraGameObject.GetComponent<AudioListener>());
  70. }
  71. if (backgroundCameraGameObject.GetComponent<OVRManager>())
  72. {
  73. Object.Destroy(backgroundCameraGameObject.GetComponent<OVRManager>());
  74. }
  75. backgroundCamera = backgroundCameraGameObject.GetComponent<Camera>();
  76. backgroundCamera.tag = "Untagged";
  77. backgroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
  78. backgroundCamera.depth = 99990.0f;
  79. backgroundCamera.rect = new Rect(0.0f, 0.0f, 0.5f, 1.0f);
  80. backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
  81. #if OVR_ANDROID_MRC
  82. backgroundCamera.targetTexture = mrcRenderTextureArray[0];
  83. #endif
  84. Debug.Assert(foregroundCameraGameObject == null);
  85. foregroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
  86. foregroundCameraGameObject.name = "OculusMRC_ForgroundCamera";
  87. foregroundCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
  88. if (foregroundCameraGameObject.GetComponent<AudioListener>())
  89. {
  90. Object.Destroy(foregroundCameraGameObject.GetComponent<AudioListener>());
  91. }
  92. if (foregroundCameraGameObject.GetComponent<OVRManager>())
  93. {
  94. Object.Destroy(foregroundCameraGameObject.GetComponent<OVRManager>());
  95. }
  96. foregroundCamera = foregroundCameraGameObject.GetComponent<Camera>();
  97. foregroundCamera.tag = "Untagged";
  98. foregroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
  99. foregroundCamera.depth = backgroundCamera.depth + 1.0f; // enforce the forground be rendered after the background
  100. foregroundCamera.rect = new Rect(0.5f, 0.0f, 0.5f, 1.0f);
  101. foregroundCamera.clearFlags = CameraClearFlags.Color;
  102. #if OVR_ANDROID_MRC
  103. foregroundCamera.backgroundColor = OVRManager.instance.externalCompositionBackdropColorQuest;
  104. #else
  105. foregroundCamera.backgroundColor = OVRManager.instance.externalCompositionBackdropColorRift;
  106. #endif
  107. foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
  108. #if OVR_ANDROID_MRC
  109. foregroundCamera.targetTexture = mrcRenderTextureArray[0];
  110. #endif
  111. previousMainCameraObject = mainCamera.gameObject;
  112. }
  113. }
  114. #if OVR_ANDROID_MRC
  115. private void RefreshAudioFilter()
  116. {
  117. if (cameraRig != null && (audioListener == null || !audioListener.enabled || !audioListener.gameObject.activeInHierarchy))
  118. {
  119. CleanupAudioFilter();
  120. AudioListener tmpAudioListener = cameraRig.centerEyeAnchor.gameObject.activeInHierarchy ? cameraRig.centerEyeAnchor.GetComponent<AudioListener>() : null;
  121. if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
  122. if (tmpAudioListener == null)
  123. {
  124. if (Camera.main != null && Camera.main.gameObject.activeInHierarchy)
  125. {
  126. tmpAudioListener = Camera.main.GetComponent<AudioListener>();
  127. if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
  128. }
  129. }
  130. if (tmpAudioListener == null)
  131. {
  132. Object[] allListeners = Object.FindObjectsOfType<AudioListener>();
  133. foreach (var l in allListeners)
  134. {
  135. AudioListener al = l as AudioListener;
  136. if (al != null && al.enabled && al.gameObject.activeInHierarchy)
  137. {
  138. tmpAudioListener = al;
  139. break;
  140. }
  141. }
  142. }
  143. if (tmpAudioListener == null)
  144. {
  145. Debug.LogWarning("[OVRExternalComposition] No AudioListener in scene");
  146. }
  147. else
  148. {
  149. Debug.LogFormat("[OVRExternalComposition] AudioListener found, obj {0}", tmpAudioListener.gameObject.name);
  150. }
  151. audioListener = tmpAudioListener;
  152. audioFilter = audioListener.gameObject.AddComponent<OVRMRAudioFilter>();
  153. audioFilter.composition = this;
  154. Debug.LogFormat("OVRMRAudioFilter added");
  155. }
  156. }
  157. private float[] cachedAudioDataArray = null;
  158. private int CastMrcFrame(int castTextureIndex)
  159. {
  160. int audioFrames;
  161. int audioChannels;
  162. GetAndResetAudioData(ref cachedAudioDataArray, out audioFrames, out audioChannels);
  163. int syncId = -1;
  164. //Debug.Log("EncodeFrameThreadObject EncodeMrcFrame");
  165. bool ret = false;
  166. if (OVRPlugin.Media.GetMrcInputVideoBufferType() == OVRPlugin.Media.InputVideoBufferType.TextureHandle)
  167. {
  168. ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex].GetNativeTexturePtr(), cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, ref syncId);
  169. }
  170. else
  171. {
  172. ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex], cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, ref syncId);
  173. }
  174. if (!ret)
  175. {
  176. Debug.LogWarning("EncodeMrcFrame failed. Likely caused by OBS plugin disconnection");
  177. return -1;
  178. }
  179. return syncId;
  180. }
  181. private void SetCameraTargetTexture(int drawTextureIndex)
  182. {
  183. RenderTexture texture = mrcRenderTextureArray[drawTextureIndex];
  184. if (backgroundCamera.targetTexture != texture)
  185. {
  186. backgroundCamera.targetTexture = texture;
  187. }
  188. if (foregroundCamera.targetTexture != texture)
  189. {
  190. foregroundCamera.targetTexture = texture;
  191. }
  192. }
  193. #endif
  194. public override void Update(GameObject gameObject, Camera mainCamera)
  195. {
  196. RefreshCameraObjects(gameObject, mainCamera);
  197. OVRPlugin.SetHandNodePoseStateLatency(0.0); // the HandNodePoseStateLatency doesn't apply to the external composition. Always enforce it to 0.0
  198. #if OVR_ANDROID_MRC
  199. RefreshAudioFilter();
  200. int drawTextureIndex = (frameIndex / 2) % 2;
  201. int castTextureIndex = 1 - drawTextureIndex;
  202. backgroundCamera.enabled = (frameIndex % 2) == 0;
  203. foregroundCamera.enabled = (frameIndex % 2) == 1;
  204. if (frameIndex % 2 == 0)
  205. {
  206. if (lastMrcEncodeFrameSyncId != -1)
  207. {
  208. OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
  209. lastMrcEncodeFrameSyncId = -1;
  210. }
  211. lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
  212. SetCameraTargetTexture(drawTextureIndex);
  213. }
  214. ++ frameIndex;
  215. #endif
  216. backgroundCamera.clearFlags = mainCamera.clearFlags;
  217. backgroundCamera.backgroundColor = mainCamera.backgroundColor;
  218. backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
  219. backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
  220. backgroundCamera.farClipPlane = mainCamera.farClipPlane;
  221. foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
  222. foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
  223. foregroundCamera.farClipPlane = mainCamera.farClipPlane;
  224. if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
  225. {
  226. OVRPose worldSpacePose = new OVRPose();
  227. OVRPose trackingSpacePose = new OVRPose();
  228. trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
  229. OVRMixedReality.fakeCameraEyeLevelPosition :
  230. OVRMixedReality.fakeCameraFloorLevelPosition;
  231. trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
  232. worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
  233. backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
  234. backgroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
  235. foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
  236. foregroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
  237. if (cameraInTrackingSpace)
  238. {
  239. backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
  240. foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
  241. }
  242. else
  243. {
  244. backgroundCamera.transform.FromOVRPose(worldSpacePose);
  245. foregroundCamera.transform.FromOVRPose(worldSpacePose);
  246. }
  247. }
  248. else
  249. {
  250. OVRPlugin.CameraExtrinsics extrinsics;
  251. OVRPlugin.CameraIntrinsics intrinsics;
  252. OVRPlugin.Posef calibrationRawPose;
  253. // So far, only support 1 camera for MR and always use camera index 0
  254. if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics, out calibrationRawPose))
  255. {
  256. float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
  257. float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
  258. backgroundCamera.fieldOfView = fovY;
  259. backgroundCamera.aspect = aspect;
  260. foregroundCamera.fieldOfView = fovY;
  261. foregroundCamera.aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
  262. if (cameraInTrackingSpace)
  263. {
  264. OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics, calibrationRawPose);
  265. backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
  266. foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
  267. }
  268. else
  269. {
  270. OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics, calibrationRawPose);
  271. backgroundCamera.transform.FromOVRPose(worldSpacePose);
  272. foregroundCamera.transform.FromOVRPose(worldSpacePose);
  273. }
  274. }
  275. else
  276. {
  277. Debug.LogError("Failed to get external camera information");
  278. return;
  279. }
  280. }
  281. Vector3 headToExternalCameraVec = mainCamera.transform.position - foregroundCamera.transform.position;
  282. float clipDistance = Vector3.Dot(headToExternalCameraVec, foregroundCamera.transform.forward);
  283. foregroundCamera.farClipPlane = Mathf.Max(foregroundCamera.nearClipPlane + 0.001f, clipDistance);
  284. }
  285. #if OVR_ANDROID_MRC
  286. private void CleanupAudioFilter()
  287. {
  288. if (audioFilter)
  289. {
  290. audioFilter.composition = null;
  291. Object.Destroy(audioFilter);
  292. Debug.LogFormat("OVRMRAudioFilter destroyed");
  293. audioFilter = null;
  294. }
  295. }
  296. #endif
  297. public override void Cleanup()
  298. {
  299. OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
  300. backgroundCamera = null;
  301. OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
  302. foregroundCamera = null;
  303. Debug.Log("ExternalComposition deactivated");
  304. #if OVR_ANDROID_MRC
  305. if (lastMrcEncodeFrameSyncId != -1)
  306. {
  307. OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
  308. lastMrcEncodeFrameSyncId = -1;
  309. }
  310. CleanupAudioFilter();
  311. for (int i=0; i<2; ++i)
  312. {
  313. mrcRenderTextureArray[i].Release();
  314. mrcRenderTextureArray[i] = null;
  315. }
  316. frameIndex = 0;
  317. #endif
  318. }
  319. private readonly object audioDataLock = new object();
  320. private List<float> cachedAudioData = new List<float>(16384);
  321. private int cachedChannels = 0;
  322. public void CacheAudioData(float[] data, int channels)
  323. {
  324. lock(audioDataLock)
  325. {
  326. if (channels != cachedChannels)
  327. {
  328. cachedAudioData.Clear();
  329. }
  330. cachedChannels = channels;
  331. cachedAudioData.AddRange(data);
  332. //Debug.LogFormat("[CacheAudioData] dspTime {0} indata {1} channels {2} accu_len {3}", AudioSettings.dspTime, data.Length, channels, cachedAudioData.Count);
  333. }
  334. }
  335. public void GetAndResetAudioData(ref float[] audioData, out int audioFrames, out int channels)
  336. {
  337. lock(audioDataLock)
  338. {
  339. //Debug.LogFormat("[GetAndResetAudioData] dspTime {0} accu_len {1}", AudioSettings.dspTime, cachedAudioData.Count);
  340. if (audioData == null || audioData.Length < cachedAudioData.Count)
  341. {
  342. audioData = new float[cachedAudioData.Capacity];
  343. }
  344. cachedAudioData.CopyTo(audioData);
  345. audioFrames = cachedAudioData.Count;
  346. channels = cachedChannels;
  347. cachedAudioData.Clear();
  348. }
  349. }
  350. }
  351. #if OVR_ANDROID_MRC
  352. public class OVRMRAudioFilter : MonoBehaviour
  353. {
  354. private bool running = false;
  355. public OVRExternalComposition composition;
  356. void Start()
  357. {
  358. running = true;
  359. }
  360. void OnAudioFilterRead(float[] data, int channels)
  361. {
  362. if (!running)
  363. return;
  364. if (composition != null)
  365. {
  366. composition.CacheAudioData(data, channels);
  367. }
  368. }
  369. }
  370. #endif
  371. #endif