Assignment for RMIT Mixed Reality in 2020

//======= Copyright (c) Valve Corporation, All rights reserved. ===============
//
// Purpose: Provides access to video feed and poses of tracked cameras.
//
// Usage:
//         var source = SteamVR_TrackedCamera.Distorted();
//         var source = SteamVR_TrackedCamera.Undistorted();
// or
//         var undistorted = true; // or false
//         var source = SteamVR_TrackedCamera.Source(undistorted);
//
// - Distorted feeds are the decoded images from the camera.
// - Undistorted feeds correct for the camera lens distortion (a.k.a. fisheye)
//   to make straight lines straight.
//
// VideoStreamTexture objects must be symmetrically Acquired and Released to
// ensure the video stream is activated and shut down properly once there are
// no more consumers. You only need to Acquire once when starting to use a
// stream, and Release when you are done using it (as opposed to every frame).
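//
// For example (a minimal sketch; see also the example class at the end of
// this file):
//         var source = SteamVR_TrackedCamera.Source(undistorted);
//         source.Acquire();                     // once, when starting
//         // ...read source.texture each frame...
//         source.Release();                     // once, when finished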
//
//=============================================================================
using UnityEngine;
using Valve.VR;

public class SteamVR_TrackedCamera
{
    public class VideoStreamTexture
    {
        public VideoStreamTexture(uint deviceIndex, bool undistorted)
        {
            this.undistorted = undistorted;
            videostream = Stream(deviceIndex);
        }

        public bool undistorted { get; private set; }
        public uint deviceIndex { get { return videostream.deviceIndex; } }
        public bool hasCamera { get { return videostream.hasCamera; } }
        public bool hasTracking { get { Update(); return header.standingTrackedDevicePose.bPoseIsValid; } }
        public uint frameId { get { Update(); return header.nFrameSequence; } }
        public VRTextureBounds_t frameBounds { get; private set; }
        public EVRTrackedCameraFrameType frameType { get { return undistorted ? EVRTrackedCameraFrameType.Undistorted : EVRTrackedCameraFrameType.Distorted; } }

        Texture2D _texture;
        public Texture2D texture { get { Update(); return _texture; } }

        public SteamVR_Utils.RigidTransform transform { get { Update(); return new SteamVR_Utils.RigidTransform(header.standingTrackedDevicePose.mDeviceToAbsoluteTracking); } }
        public Vector3 velocity { get { Update(); var pose = header.standingTrackedDevicePose; return new Vector3(pose.vVelocity.v0, pose.vVelocity.v1, -pose.vVelocity.v2); } }
        public Vector3 angularVelocity { get { Update(); var pose = header.standingTrackedDevicePose; return new Vector3(-pose.vAngularVelocity.v0, -pose.vAngularVelocity.v1, pose.vAngularVelocity.v2); } }
        public TrackedDevicePose_t GetPose() { Update(); return header.standingTrackedDevicePose; }
        public ulong Acquire()
        {
            return videostream.Acquire();
        }

        public ulong Release()
        {
            var result = videostream.Release();

            // Once the underlying stream has fully shut down, destroy the
            // external texture that wrapped its frames.
            if (videostream.handle == 0)
            {
                Object.Destroy(_texture);
                _texture = null;
            }

            return result;
        }
        int prevFrameCount = -1;
        void Update()
        {
            // Only refresh once per rendered frame.
            if (Time.frameCount == prevFrameCount)
                return;

            prevFrameCount = Time.frameCount;

            if (videostream.handle == 0)
                return;

            var vr = SteamVR.instance;
            if (vr == null)
                return;

            var trackedCamera = OpenVR.TrackedCamera;
            if (trackedCamera == null)
                return;

            var nativeTex = System.IntPtr.Zero;
            var deviceTexture = (_texture != null) ? _texture : new Texture2D(2, 2);
            var headerSize = (uint)System.Runtime.InteropServices.Marshal.SizeOf(header.GetType());

            if (vr.textureType == ETextureType.OpenGL)
            {
                if (glTextureId != 0)
                    trackedCamera.ReleaseVideoStreamTextureGL(videostream.handle, glTextureId);

                if (trackedCamera.GetVideoStreamTextureGL(videostream.handle, frameType, ref glTextureId, ref header, headerSize) != EVRTrackedCameraError.None)
                    return;

                nativeTex = (System.IntPtr)glTextureId;
            }
            else if (vr.textureType == ETextureType.DirectX)
            {
                if (trackedCamera.GetVideoStreamTextureD3D11(videostream.handle, frameType, deviceTexture.GetNativeTexturePtr(), ref nativeTex, ref header, headerSize) != EVRTrackedCameraError.None)
                    return;
            }

            if (_texture == null)
            {
                // First frame: wrap the native texture and cache the frame bounds.
                _texture = Texture2D.CreateExternalTexture((int)header.nWidth, (int)header.nHeight, TextureFormat.RGBA32, false, false, nativeTex);

                uint width = 0, height = 0;
                var frameBounds = new VRTextureBounds_t();
                if (trackedCamera.GetVideoStreamTextureSize(deviceIndex, frameType, ref frameBounds, ref width, ref height) == EVRTrackedCameraError.None)
                {
                    // Account for textures being upside-down in Unity.
                    frameBounds.vMin = 1.0f - frameBounds.vMin;
                    frameBounds.vMax = 1.0f - frameBounds.vMax;
                    this.frameBounds = frameBounds;
                }
            }
            else
            {
                // Subsequent frames: just point at the new native texture.
                _texture.UpdateExternalTexture(nativeTex);
            }
        }
        uint glTextureId;
        VideoStream videostream;
        CameraVideoStreamFrameHeader_t header;
    }
    #region Top level accessors.

    public static VideoStreamTexture Distorted(int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
    {
        if (distorted == null)
            distorted = new VideoStreamTexture[OpenVR.k_unMaxTrackedDeviceCount];
        if (distorted[deviceIndex] == null)
            distorted[deviceIndex] = new VideoStreamTexture((uint)deviceIndex, false);
        return distorted[deviceIndex];
    }

    public static VideoStreamTexture Undistorted(int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
    {
        if (undistorted == null)
            undistorted = new VideoStreamTexture[OpenVR.k_unMaxTrackedDeviceCount];
        if (undistorted[deviceIndex] == null)
            undistorted[deviceIndex] = new VideoStreamTexture((uint)deviceIndex, true);
        return undistorted[deviceIndex];
    }

    public static VideoStreamTexture Source(bool undistorted, int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
    {
        return undistorted ? Undistorted(deviceIndex) : Distorted(deviceIndex);
    }

    private static VideoStreamTexture[] distorted, undistorted;

    #endregion
    #region Internal class to manage lifetime of video streams (per device).

    class VideoStream
    {
        public VideoStream(uint deviceIndex)
        {
            this.deviceIndex = deviceIndex;
            var trackedCamera = OpenVR.TrackedCamera;
            if (trackedCamera != null)
                trackedCamera.HasCamera(deviceIndex, ref _hasCamera);
        }

        public uint deviceIndex { get; private set; }

        ulong _handle;
        public ulong handle { get { return _handle; } }

        bool _hasCamera;
        public bool hasCamera { get { return _hasCamera; } }

        ulong refCount;

        // The native streaming service is started on the first Acquire and
        // stopped when the reference count returns to zero.
        public ulong Acquire()
        {
            if (_handle == 0 && hasCamera)
            {
                var trackedCamera = OpenVR.TrackedCamera;
                if (trackedCamera != null)
                    trackedCamera.AcquireVideoStreamingService(deviceIndex, ref _handle);
            }
            return ++refCount;
        }

        public ulong Release()
        {
            if (refCount > 0 && --refCount == 0 && _handle != 0)
            {
                var trackedCamera = OpenVR.TrackedCamera;
                if (trackedCamera != null)
                    trackedCamera.ReleaseVideoStreamingService(_handle);
                _handle = 0;
            }
            return refCount;
        }
    }
    static VideoStream Stream(uint deviceIndex)
    {
        if (videostreams == null)
            videostreams = new VideoStream[OpenVR.k_unMaxTrackedDeviceCount];
        if (videostreams[deviceIndex] == null)
            videostreams[deviceIndex] = new VideoStream(deviceIndex);
        return videostreams[deviceIndex];
    }
    static VideoStream[] videostreams;

    #endregion
}
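
For reference, the sketch below shows one way a consumer of this class might look. It is not part of the plugin: the class name, the material field, and the wiring are illustrative assumptions; only the SteamVR_TrackedCamera calls come from the file above. It assumes a running SteamVR session and a headset whose camera is enabled in SteamVR's settings.

using UnityEngine;

// Hypothetical example consumer (not part of the SteamVR plugin).
public class TrackedCameraViewer : MonoBehaviour
{
    public bool undistorted = true;
    public Material target; // material that should display the camera feed

    SteamVR_TrackedCamera.VideoStreamTexture source;

    void OnEnable()
    {
        source = SteamVR_TrackedCamera.Source(undistorted);
        source.Acquire(); // activate the stream once, not every frame
        if (!source.hasCamera)
            enabled = false; // no camera on this device; stop updating
    }

    void Update()
    {
        // Reading .texture drives the per-frame update inside VideoStreamTexture.
        var texture = source.texture;
        if (texture != null && target != null)
            target.mainTexture = texture;
    }

    void OnDisable()
    {
        source.Release(); // symmetric with the Acquire in OnEnable
    }
}

Note that Acquire and Release pair up in OnEnable/OnDisable rather than per frame, matching the lifetime contract described in the header comment.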