Assignment for RMIT Mixed Reality in 2020
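The file below is OvrAvatar.cs, the main avatar driver component from the Oculus Avatar SDK's Unity integration, as included in this project.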

using UnityEngine;
using System.Collections;
using System;
using Oculus.Avatar;
using System.Runtime.InteropServices;
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor;
#endif
#if AVATAR_INTERNAL
using UnityEngine.Events;
#endif

[System.Serializable]
public class AvatarLayer
{
    public int layerIndex;
}

#if UNITY_EDITOR
[CustomPropertyDrawer(typeof(AvatarLayer))]
public class AvatarLayerPropertyDrawer : PropertyDrawer
{
    public override void OnGUI(Rect position, SerializedProperty property, GUIContent label)
    {
        EditorGUI.BeginProperty(position, GUIContent.none, property);
        SerializedProperty layerIndex = property.FindPropertyRelative("layerIndex");
        position = EditorGUI.PrefixLabel(position, GUIUtility.GetControlID(FocusType.Passive), label);
        layerIndex.intValue = EditorGUI.LayerField(position, layerIndex.intValue);
        EditorGUI.EndProperty();
    }
}
#endif

[System.Serializable]
public class PacketRecordSettings
{
    internal bool RecordingFrames = false;
    public float UpdateRate = 1f / 30f; // 30 Hz packet update rate
    internal float AccumulatedTime;
}

public class OvrAvatar : MonoBehaviour
{
    [Header("Avatar")]
    public IntPtr sdkAvatar = IntPtr.Zero;
    public string oculusUserID;
    public OvrAvatarDriver Driver;

    [Header("Capabilities")]
    public bool EnableBody = true;
    public bool EnableHands = true;
    public bool EnableBase = true;
    public bool EnableExpressive = false;

    [Header("Network")]
    public bool RecordPackets;
    public bool UseSDKPackets = true;
    public PacketRecordSettings PacketSettings = new PacketRecordSettings();

    [Header("Visibility")]
    public bool StartWithControllers;
    public AvatarLayer FirstPersonLayer;
    public AvatarLayer ThirdPersonLayer;
    public bool ShowFirstPerson = true;
    public bool ShowThirdPerson;
    internal ovrAvatarCapabilities Capabilities = ovrAvatarCapabilities.Body;

    [Header("Performance")]
#if UNITY_ANDROID
    [Tooltip(
        "LOD mesh complexity and texture resolution. Highest LOD recommended on PC and simple mobile apps." +
        " Medium LOD recommended on mobile devices or for background characters on PC." +
        " Lowest LOD recommended for background characters on mobile.")]
    [SerializeField]
    internal ovrAvatarAssetLevelOfDetail LevelOfDetail = ovrAvatarAssetLevelOfDetail.Medium;
#else
    [SerializeField]
    internal ovrAvatarAssetLevelOfDetail LevelOfDetail = ovrAvatarAssetLevelOfDetail.Highest;
#endif
#if UNITY_ANDROID && UNITY_5_5_OR_NEWER
    [Tooltip(
        "Enable to use combined meshes to reduce draw calls. Currently only available on mobile devices. " +
        "Will be forced to false on PC.")]
    private bool CombineMeshes = true;
#else
    private bool CombineMeshes = false;
#endif
    [Tooltip(
        "Enable to use transparent queue, disable to use geometry queue. Requires restart to take effect.")]
    public bool UseTransparentRenderQueue = true;

    [Header("Shaders")]
    public Shader Monochrome_SurfaceShader;
    public Shader Monochrome_SurfaceShader_SelfOccluding;
    public Shader Monochrome_SurfaceShader_PBS;
    public Shader Skinshaded_SurfaceShader_SingleComponent;
    public Shader Skinshaded_VertFrag_SingleComponent;
    public Shader Skinshaded_VertFrag_CombinedMesh;
    public Shader Skinshaded_Expressive_SurfaceShader_SingleComponent;
    public Shader Skinshaded_Expressive_VertFrag_SingleComponent;
    public Shader Skinshaded_Expressive_VertFrag_CombinedMesh;
    public Shader Loader_VertFrag_CombinedMesh;
    public Shader EyeLens;
    public Shader ControllerShader;

    [Header("Other")]
    public bool CanOwnMicrophone = true;
    [Tooltip(
        "Enable laughter detection and animation as part of OVRLipSync.")]
    public bool EnableLaughter = true;
    public GameObject MouthAnchor;
    public Transform LeftHandCustomPose;
    public Transform RightHandCustomPose;

    // Avatar asset
    private HashSet<UInt64> assetLoadingIds = new HashSet<UInt64>();
    private bool assetsFinishedLoading = false;

    // Material manager
    private OvrAvatarMaterialManager materialManager;
    private bool waitingForCombinedMesh = false;

    // Global expressive system initialization
    private static bool doneExpressiveGlobalInit = false;

    // Clothing offsets
    private Vector4 clothingAlphaOffset = new Vector4(0f, 0f, 0f, 1f);
    private UInt64 clothingAlphaTexture = 0;

    // Lipsync
    private OVRLipSyncMicInput micInput = null;
    private OVRLipSyncContext lipsyncContext = null;
    private OVRLipSync.Frame currentFrame = new OVRLipSync.Frame();
    private float[] visemes = new float[VISEME_COUNT];
    private AudioSource audioSource;
    private ONSPAudioSource spatializedSource;
    private List<float[]> voiceUpdates = new List<float[]>();
    private static ovrAvatarVisemes RuntimeVisemes;

    // Custom hand poses
    private Transform cachedLeftHandCustomPose;
    private Transform[] cachedCustomLeftHandJoints;
    private ovrAvatarTransform[] cachedLeftHandTransforms;
    private Transform cachedRightHandCustomPose;
    private Transform[] cachedCustomRightHandJoints;
    private ovrAvatarTransform[] cachedRightHandTransforms;
    private bool showLeftController;
    private bool showRightController;

    // Consts
#if UNITY_ANDROID
    private const bool USE_MOBILE_TEXTURE_FORMAT = true;
#else
    private const bool USE_MOBILE_TEXTURE_FORMAT = false;
#endif
    private static readonly Vector3 MOUTH_HEAD_OFFSET = new Vector3(0, -0.085f, 0.09f);
    private const string MOUTH_HELPER_NAME = "MouthAnchor";
    // Initial 'silence' score, 14 viseme scores, and 1 laughter score as the last element
    private const int VISEME_COUNT = 16;
    // Lipsync animation speeds
    private const float ACTION_UNIT_ONSET_SPEED = 30f;
    private const float ACTION_UNIT_FALLOFF_SPEED = 20f;
    private const float VISEME_LEVEL_MULTIPLIER = 1.5f;

    // Internals
    internal UInt64 oculusUserIDInternal;
    internal OvrAvatarBase Base = null;
    internal OvrAvatarTouchController ControllerLeft = null;
    internal OvrAvatarTouchController ControllerRight = null;
    internal OvrAvatarBody Body = null;
    internal OvrAvatarHand HandLeft = null;
    internal OvrAvatarHand HandRight = null;
    internal ovrAvatarLookAndFeelVersion LookAndFeelVersion = ovrAvatarLookAndFeelVersion.Two;
    internal ovrAvatarLookAndFeelVersion FallbackLookAndFeelVersion = ovrAvatarLookAndFeelVersion.Two;
#if AVATAR_INTERNAL
    public AvatarControllerBlend BlendController;
    public UnityEvent AssetsDoneLoading = new UnityEvent();
#endif

    // Avatar packets
    public class PacketEventArgs : EventArgs
    {
        public readonly OvrAvatarPacket Packet;
        public PacketEventArgs(OvrAvatarPacket packet)
        {
            Packet = packet;
        }
    }

    private OvrAvatarPacket CurrentUnityPacket;
    public EventHandler<PacketEventArgs> PacketRecorded;

    public enum HandType
    {
        Right,
        Left,
        Max
    }

    public enum HandJoint
    {
        HandBase,
        IndexBase,
        IndexTip,
        ThumbBase,
        ThumbTip,
        Max,
    }

    private static string[,] HandJoints = new string[(int)HandType.Max, (int)HandJoint.Max]
    {
        {
            "hands:r_hand_world",
            "hands:r_hand_world/hands:b_r_hand/hands:b_r_index1",
            "hands:r_hand_world/hands:b_r_hand/hands:b_r_index1/hands:b_r_index2/hands:b_r_index3/hands:b_r_index_ignore",
            "hands:r_hand_world/hands:b_r_hand/hands:b_r_thumb1/hands:b_r_thumb2",
            "hands:r_hand_world/hands:b_r_hand/hands:b_r_thumb1/hands:b_r_thumb2/hands:b_r_thumb3/hands:b_r_thumb_ignore"
        },
        {
            "hands:l_hand_world",
            "hands:l_hand_world/hands:b_l_hand/hands:b_l_index1",
            "hands:l_hand_world/hands:b_l_hand/hands:b_l_index1/hands:b_l_index2/hands:b_l_index3/hands:b_l_index_ignore",
            "hands:l_hand_world/hands:b_l_hand/hands:b_l_thumb1/hands:b_l_thumb2",
            "hands:l_hand_world/hands:b_l_hand/hands:b_l_thumb1/hands:b_l_thumb2/hands:b_l_thumb3/hands:b_l_thumb_ignore"
        }
    };

    static OvrAvatar()
    {
        // This size has to match the 'MarshalAs' attribute in the ovrAvatarVisemes declaration.
        RuntimeVisemes.visemeParams = new float[32];
        RuntimeVisemes.visemeParamCount = VISEME_COUNT;
    }

    void OnDestroy()
    {
        if (sdkAvatar != IntPtr.Zero)
        {
            CAPI.ovrAvatar_Destroy(sdkAvatar);
        }
    }

    public void AssetLoadedCallback(OvrAvatarAsset asset)
    {
        assetLoadingIds.Remove(asset.assetID);
    }

    public void CombinedMeshLoadedCallback(IntPtr assetPtr)
    {
        if (!waitingForCombinedMesh)
        {
            return;
        }
        var meshIDs = CAPI.ovrAvatarAsset_GetCombinedMeshIDs(assetPtr);
        foreach (var id in meshIDs)
        {
            assetLoadingIds.Remove(id);
        }
        CAPI.ovrAvatar_GetCombinedMeshAlphaData(sdkAvatar, ref clothingAlphaTexture, ref clothingAlphaOffset);
        waitingForCombinedMesh = false;
    }
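
    // Note: assetLoadingIds drains as these load callbacks arrive; Update() waits for
    // it to empty before building render components and finishing initialization.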

    private OvrAvatarSkinnedMeshRenderComponent AddSkinnedMeshRenderComponent(GameObject gameObject, ovrAvatarRenderPart_SkinnedMeshRender skinnedMeshRender)
    {
        OvrAvatarSkinnedMeshRenderComponent skinnedMeshRenderer = gameObject.AddComponent<OvrAvatarSkinnedMeshRenderComponent>();
        skinnedMeshRenderer.Initialize(skinnedMeshRender, Monochrome_SurfaceShader, Monochrome_SurfaceShader_SelfOccluding, ThirdPersonLayer.layerIndex, FirstPersonLayer.layerIndex);
        return skinnedMeshRenderer;
    }

    private OvrAvatarSkinnedMeshRenderPBSComponent AddSkinnedMeshRenderPBSComponent(GameObject gameObject, ovrAvatarRenderPart_SkinnedMeshRenderPBS skinnedMeshRenderPBS)
    {
        OvrAvatarSkinnedMeshRenderPBSComponent skinnedMeshRenderer = gameObject.AddComponent<OvrAvatarSkinnedMeshRenderPBSComponent>();
        skinnedMeshRenderer.Initialize(skinnedMeshRenderPBS, Monochrome_SurfaceShader_PBS, ThirdPersonLayer.layerIndex, FirstPersonLayer.layerIndex);
        return skinnedMeshRenderer;
    }

    private OvrAvatarSkinnedMeshPBSV2RenderComponent AddSkinnedMeshRenderPBSV2Component(
        IntPtr renderPart,
        GameObject go,
        ovrAvatarRenderPart_SkinnedMeshRenderPBS_V2 skinnedMeshRenderPBSV2,
        bool isBodyPartZero,
        bool isControllerModel)
    {
        OvrAvatarSkinnedMeshPBSV2RenderComponent skinnedMeshRenderer = go.AddComponent<OvrAvatarSkinnedMeshPBSV2RenderComponent>();
        skinnedMeshRenderer.Initialize(
            renderPart,
            skinnedMeshRenderPBSV2,
            materialManager,
            ThirdPersonLayer.layerIndex,
            FirstPersonLayer.layerIndex,
            isBodyPartZero && CombineMeshes,
            LevelOfDetail,
            isBodyPartZero && EnableExpressive,
            this,
            isControllerModel);
        return skinnedMeshRenderer;
    }

    public static IntPtr GetRenderPart(ovrAvatarComponent component, UInt32 renderPartIndex)
    {
        return Marshal.ReadIntPtr(component.renderParts, Marshal.SizeOf(typeof(IntPtr)) * (int)renderPartIndex);
    }

    private static string GetRenderPartName(ovrAvatarComponent component, uint renderPartIndex)
    {
        return component.name + "_renderPart_" + (int)renderPartIndex;
    }
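
    // The Avatar SDK and Unity use opposite-handed coordinate systems: the transform
    // helpers below that cross the native boundary negate Z on positions and the X/Y
    // quaternion components when converting in either direction.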

    internal static void ConvertTransform(float[] transform, ref ovrAvatarTransform target)
    {
        target.position.x = transform[0];
        target.position.y = transform[1];
        target.position.z = transform[2];
        target.orientation.x = transform[3];
        target.orientation.y = transform[4];
        target.orientation.z = transform[5];
        target.orientation.w = transform[6];
        target.scale.x = transform[7];
        target.scale.y = transform[8];
        target.scale.z = transform[9];
    }

    internal static void ConvertTransform(ovrAvatarTransform transform, Transform target)
    {
        Vector3 position = transform.position;
        position.z = -position.z;
        Quaternion orientation = transform.orientation;
        orientation.x = -orientation.x;
        orientation.y = -orientation.y;
        target.localPosition = position;
        target.localRotation = orientation;
        target.localScale = transform.scale;
    }

    public static ovrAvatarTransform CreateOvrAvatarTransform(Vector3 position, Quaternion orientation)
    {
        return new ovrAvatarTransform
        {
            position = new Vector3(position.x, position.y, -position.z),
            orientation = new Quaternion(-orientation.x, -orientation.y, orientation.z, orientation.w),
            scale = Vector3.one
        };
    }

    private static ovrAvatarGazeTarget CreateOvrGazeTarget(uint targetId, Vector3 targetPosition, ovrAvatarGazeTargetType targetType)
    {
        return new ovrAvatarGazeTarget
        {
            id = targetId,
            // Do the coordinate system switch.
            worldPosition = new Vector3(targetPosition.x, targetPosition.y, -targetPosition.z),
            type = targetType
        };
    }

    private void BuildRenderComponents()
    {
        ovrAvatarBaseComponent baseComponent = new ovrAvatarBaseComponent();
        ovrAvatarHandComponent leftHandComponent = new ovrAvatarHandComponent();
        ovrAvatarHandComponent rightHandComponent = new ovrAvatarHandComponent();
        ovrAvatarControllerComponent leftControllerComponent = new ovrAvatarControllerComponent();
        ovrAvatarControllerComponent rightControllerComponent = new ovrAvatarControllerComponent();
        ovrAvatarBodyComponent bodyComponent = new ovrAvatarBodyComponent();
        ovrAvatarComponent dummyComponent = new ovrAvatarComponent();
        const bool FetchName = true;

        if (CAPI.ovrAvatarPose_GetLeftHandComponent(sdkAvatar, ref leftHandComponent))
        {
            CAPI.ovrAvatarComponent_Get(leftHandComponent.renderComponent, FetchName, ref dummyComponent);
            AddAvatarComponent(ref HandLeft, dummyComponent);
            HandLeft.isLeftHand = true;
        }
        if (CAPI.ovrAvatarPose_GetRightHandComponent(sdkAvatar, ref rightHandComponent))
        {
            CAPI.ovrAvatarComponent_Get(rightHandComponent.renderComponent, FetchName, ref dummyComponent);
            AddAvatarComponent(ref HandRight, dummyComponent);
            HandRight.isLeftHand = false;
        }
        if (CAPI.ovrAvatarPose_GetBodyComponent(sdkAvatar, ref bodyComponent))
        {
            CAPI.ovrAvatarComponent_Get(bodyComponent.renderComponent, FetchName, ref dummyComponent);
            AddAvatarComponent(ref Body, dummyComponent);
        }
        if (CAPI.ovrAvatarPose_GetLeftControllerComponent(sdkAvatar, ref leftControllerComponent))
        {
            CAPI.ovrAvatarComponent_Get(leftControllerComponent.renderComponent, FetchName, ref dummyComponent);
            AddAvatarComponent(ref ControllerLeft, dummyComponent);
            ControllerLeft.isLeftHand = true;
        }
        if (CAPI.ovrAvatarPose_GetRightControllerComponent(sdkAvatar, ref rightControllerComponent))
        {
            CAPI.ovrAvatarComponent_Get(rightControllerComponent.renderComponent, FetchName, ref dummyComponent);
            AddAvatarComponent(ref ControllerRight, dummyComponent);
            ControllerRight.isLeftHand = false;
        }
        if (CAPI.ovrAvatarPose_GetBaseComponent(sdkAvatar, ref baseComponent))
        {
            CAPI.ovrAvatarComponent_Get(baseComponent.renderComponent, FetchName, ref dummyComponent);
            AddAvatarComponent(ref Base, dummyComponent);
        }
    }

    private void AddAvatarComponent<T>(ref T root, ovrAvatarComponent nativeComponent) where T : OvrAvatarComponent
    {
        GameObject componentObject = new GameObject();
        componentObject.name = nativeComponent.name;
        componentObject.transform.SetParent(transform);
        root = componentObject.AddComponent<T>();
        root.SetOvrAvatarOwner(this);
        AddRenderParts(root, nativeComponent, componentObject.transform);
    }

    void UpdateCustomPoses()
    {
        // Check to see if the pose roots changed
        if (UpdatePoseRoot(LeftHandCustomPose, ref cachedLeftHandCustomPose, ref cachedCustomLeftHandJoints, ref cachedLeftHandTransforms))
        {
            if (cachedLeftHandCustomPose == null && sdkAvatar != IntPtr.Zero)
            {
                CAPI.ovrAvatar_SetLeftHandGesture(sdkAvatar, ovrAvatarHandGesture.Default);
            }
        }
        if (UpdatePoseRoot(RightHandCustomPose, ref cachedRightHandCustomPose, ref cachedCustomRightHandJoints, ref cachedRightHandTransforms))
        {
            if (cachedRightHandCustomPose == null && sdkAvatar != IntPtr.Zero)
            {
                CAPI.ovrAvatar_SetRightHandGesture(sdkAvatar, ovrAvatarHandGesture.Default);
            }
        }

        // Check to see if the custom gestures need to be updated
        if (sdkAvatar != IntPtr.Zero)
        {
            if (cachedLeftHandCustomPose != null && UpdateTransforms(cachedCustomLeftHandJoints, cachedLeftHandTransforms))
            {
                CAPI.ovrAvatar_SetLeftHandCustomGesture(sdkAvatar, (uint)cachedLeftHandTransforms.Length, cachedLeftHandTransforms);
            }
            if (cachedRightHandCustomPose != null && UpdateTransforms(cachedCustomRightHandJoints, cachedRightHandTransforms))
            {
                CAPI.ovrAvatar_SetRightHandCustomGesture(sdkAvatar, (uint)cachedRightHandTransforms.Length, cachedRightHandTransforms);
            }
        }
    }

    static bool UpdatePoseRoot(Transform poseRoot, ref Transform cachedPoseRoot, ref Transform[] cachedPoseJoints, ref ovrAvatarTransform[] transforms)
    {
        if (poseRoot == cachedPoseRoot)
        {
            return false;
        }
        if (!poseRoot)
        {
            cachedPoseRoot = null;
            cachedPoseJoints = null;
            transforms = null;
        }
        else
        {
            List<Transform> joints = new List<Transform>();
            OrderJoints(poseRoot, joints);
            cachedPoseRoot = poseRoot;
            cachedPoseJoints = joints.ToArray();
            transforms = new ovrAvatarTransform[joints.Count];
        }
        return true;
    }

    static bool UpdateTransforms(Transform[] joints, ovrAvatarTransform[] transforms)
    {
        bool updated = false;
        for (int i = 0; i < joints.Length; ++i)
        {
            Transform joint = joints[i];
            ovrAvatarTransform transform = CreateOvrAvatarTransform(joint.localPosition, joint.localRotation);
            if (transform.position != transforms[i].position || transform.orientation != transforms[i].orientation)
            {
                transforms[i] = transform;
                updated = true;
            }
        }
        return updated;
    }

    private static void OrderJoints(Transform transform, List<Transform> joints)
    {
        joints.Add(transform);
        for (int i = 0; i < transform.childCount; ++i)
        {
            Transform child = transform.GetChild(i);
            OrderJoints(child, joints);
        }
    }

    void AvatarSpecificationCallback(IntPtr avatarSpecification)
    {
        sdkAvatar = CAPI.ovrAvatar_Create(avatarSpecification, Capabilities);
        ShowLeftController(showLeftController);
        ShowRightController(showRightController);

        // Pump the Remote driver once to push the controller type through
        if (Driver != null)
        {
            Driver.UpdateTransformsFromPose(sdkAvatar);
        }

        // Fetch all the assets that this avatar uses.
        UInt32 assetCount = CAPI.ovrAvatar_GetReferencedAssetCount(sdkAvatar);
        for (UInt32 i = 0; i < assetCount; ++i)
        {
            UInt64 id = CAPI.ovrAvatar_GetReferencedAsset(sdkAvatar, i);
            if (OvrAvatarSDKManager.Instance.GetAsset(id) == null)
            {
                OvrAvatarSDKManager.Instance.BeginLoadingAsset(
                    id,
                    LevelOfDetail,
                    AssetLoadedCallback);
                assetLoadingIds.Add(id);
            }
        }
        if (CombineMeshes)
        {
            OvrAvatarSDKManager.Instance.RegisterCombinedMeshCallback(
                sdkAvatar,
                CombinedMeshLoadedCallback);
        }
    }

    void Start()
    {
        if (OvrAvatarSDKManager.Instance == null)
        {
            return;
        }
#if !UNITY_ANDROID
        if (CombineMeshes)
        {
            CombineMeshes = false;
            AvatarLogger.Log("Combined Meshes currently only supported on mobile");
        }
#endif
#if !UNITY_5_5_OR_NEWER
        if (CombineMeshes)
        {
            CombineMeshes = false;
            AvatarLogger.LogWarning("Combined Meshes requires Unity 5.5.0+");
        }
#endif
        materialManager = gameObject.AddComponent<OvrAvatarMaterialManager>();

        try
        {
            oculusUserIDInternal = UInt64.Parse(oculusUserID);
        }
        catch (Exception)
        {
            oculusUserIDInternal = 0;
            AvatarLogger.LogWarning("Invalid Oculus User ID Format");
        }

        // If no Oculus ID is supplied, turn off combined meshes to prevent the texture arrays
        // being populated by invalid textures.
        if (oculusUserIDInternal == 0)
        {
            AvatarLogger.LogWarning("Oculus User ID set to 0. Provide actual user ID: " + gameObject.name);
            CombineMeshes = false;
        }

        AvatarLogger.Log("Starting OvrAvatar " + gameObject.name);
        AvatarLogger.Log(AvatarLogger.Tab + "LOD: " + LevelOfDetail.ToString());
        AvatarLogger.Log(AvatarLogger.Tab + "Combine Meshes: " + CombineMeshes);
        AvatarLogger.Log(AvatarLogger.Tab + "Force Mobile Textures: " + USE_MOBILE_TEXTURE_FORMAT);
        AvatarLogger.Log(AvatarLogger.Tab + "Oculus User ID: " + oculusUserIDInternal);

        Capabilities = 0;
        bool is3Dof = false;
        var headsetType = OVRPlugin.GetSystemHeadsetType();
        switch (headsetType)
        {
            case OVRPlugin.SystemHeadset.GearVR_R320:
            case OVRPlugin.SystemHeadset.GearVR_R321:
            case OVRPlugin.SystemHeadset.GearVR_R322:
            case OVRPlugin.SystemHeadset.GearVR_R323:
            case OVRPlugin.SystemHeadset.GearVR_R324:
            case OVRPlugin.SystemHeadset.GearVR_R325:
            case OVRPlugin.SystemHeadset.Oculus_Go:
                is3Dof = true;
                break;
            case OVRPlugin.SystemHeadset.Oculus_Quest:
            case OVRPlugin.SystemHeadset.Rift_S:
            case OVRPlugin.SystemHeadset.Rift_DK1:
            case OVRPlugin.SystemHeadset.Rift_DK2:
            case OVRPlugin.SystemHeadset.Rift_CV1:
            default:
                break;
        }

        // The SDK 3DOF arm model requires the body skeleton to pose itself; it will crash without it.
        // The likely use case here is trying to have an invisible body.
        // T45010595
        if (is3Dof && !EnableBody)
        {
            AvatarLogger.Log("Forcing the Body component for 3Dof hand tracking, and setting the visibility to 1st person");
            EnableBody = true;
            ShowFirstPerson = true;
            ShowThirdPerson = false;
        }

        if (EnableBody) Capabilities |= ovrAvatarCapabilities.Body;
        if (EnableHands) Capabilities |= ovrAvatarCapabilities.Hands;
        if (EnableBase && EnableBody) Capabilities |= ovrAvatarCapabilities.Base;
        if (EnableExpressive) Capabilities |= ovrAvatarCapabilities.Expressive;

        // Enable body tilt on 6DOF devices
        if (OVRPlugin.positionSupported)
        {
            Capabilities |= ovrAvatarCapabilities.BodyTilt;
        }

        ShowLeftController(StartWithControllers);
        ShowRightController(StartWithControllers);

        OvrAvatarSDKManager.AvatarSpecRequestParams avatarSpecRequest = new OvrAvatarSDKManager.AvatarSpecRequestParams(
            oculusUserIDInternal,
            this.AvatarSpecificationCallback,
            CombineMeshes,
            LevelOfDetail,
            USE_MOBILE_TEXTURE_FORMAT,
            LookAndFeelVersion,
            FallbackLookAndFeelVersion,
            EnableExpressive);
        OvrAvatarSDKManager.Instance.RequestAvatarSpecification(avatarSpecRequest);
        OvrAvatarSDKManager.Instance.AddLoadingAvatar(GetInstanceID());

        waitingForCombinedMesh = CombineMeshes;
        if (Driver != null)
        {
            Driver.Mode = UseSDKPackets ? OvrAvatarDriver.PacketMode.SDK : OvrAvatarDriver.PacketMode.Unity;
        }
    }
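
    // Per-frame flow: Update() pumps the driver pose into the SDK, records packets if
    // enabled, and, once every referenced asset has loaded, builds the render
    // components and then runs the voice, custom-pose, and expressive updates.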

    void Update()
    {
        if (!OvrAvatarSDKManager.Instance || sdkAvatar == IntPtr.Zero || materialManager == null)
        {
            return;
        }

        if (Driver != null)
        {
            Driver.UpdateTransforms(sdkAvatar);
            foreach (float[] voiceUpdate in voiceUpdates)
            {
                CAPI.ovrAvatarPose_UpdateVoiceVisualization(sdkAvatar, voiceUpdate);
            }
            voiceUpdates.Clear();
#if AVATAR_INTERNAL
            if (BlendController != null)
            {
                BlendController.UpdateBlend(sdkAvatar);
            }
#endif
            CAPI.ovrAvatarPose_Finalize(sdkAvatar, Time.deltaTime);
        }

        if (RecordPackets)
        {
            RecordFrame();
        }

        if (assetLoadingIds.Count == 0)
        {
            if (!assetsFinishedLoading)
            {
                try
                {
                    BuildRenderComponents();
                }
                catch (Exception)
                {
                    assetsFinishedLoading = true;
                    throw; // rethrow, preserving the original call stack ('throw e' would reset it)
                }
#if AVATAR_INTERNAL
                AssetsDoneLoading.Invoke();
#endif
                InitPostLoad();
                assetsFinishedLoading = true;
                OvrAvatarSDKManager.Instance.RemoveLoadingAvatar(GetInstanceID());
            }
            UpdateVoiceBehavior();
            UpdateCustomPoses();
            if (EnableExpressive)
            {
                UpdateExpressive();
            }
        }
    }

    public static ovrAvatarHandInputState CreateInputState(ovrAvatarTransform transform, OvrAvatarDriver.ControllerPose pose)
    {
        ovrAvatarHandInputState inputState = new ovrAvatarHandInputState();
        inputState.transform = transform;
        inputState.buttonMask = pose.buttons;
        inputState.touchMask = pose.touches;
        inputState.joystickX = pose.joystickPosition.x;
        inputState.joystickY = pose.joystickPosition.y;
        inputState.indexTrigger = pose.indexTrigger;
        inputState.handTrigger = pose.handTrigger;
        inputState.isActive = pose.isActive;
        return inputState;
    }

    public void ShowControllers(bool show)
    {
        ShowLeftController(show);
        ShowRightController(show);
    }

    public void ShowLeftController(bool show)
    {
        if (sdkAvatar != IntPtr.Zero)
        {
            CAPI.ovrAvatar_SetLeftControllerVisibility(sdkAvatar, show);
        }
        showLeftController = show;
    }

    public void ShowRightController(bool show)
    {
        if (sdkAvatar != IntPtr.Zero)
        {
            CAPI.ovrAvatar_SetRightControllerVisibility(sdkAvatar, show);
        }
        showRightController = show;
    }

    public void UpdateVoiceVisualization(float[] voiceSamples)
    {
        voiceUpdates.Add(voiceSamples);
    }

    void RecordFrame()
    {
        if (UseSDKPackets)
        {
            RecordSDKFrame();
        }
        else
        {
            RecordUnityFrame();
        }
    }

    // Meant to be used mutually exclusively with RecordSDKFrame to give the user more
    // options to optimize or tweak packet data
    private void RecordUnityFrame()
    {
        var deltaSeconds = Time.deltaTime;
        var frame = Driver.GetCurrentPose();

        // If this is our first packet, store the pose as the initial frame
        if (CurrentUnityPacket == null)
        {
            CurrentUnityPacket = new OvrAvatarPacket(frame);
            deltaSeconds = 0;
        }

        float recordedSeconds = 0;
        while (recordedSeconds < deltaSeconds)
        {
            float remainingSeconds = deltaSeconds - recordedSeconds;
            float remainingPacketSeconds = PacketSettings.UpdateRate - CurrentUnityPacket.Duration;

            // If we're not going to fill the packet, just add the frame
            if (remainingSeconds < remainingPacketSeconds)
            {
                CurrentUnityPacket.AddFrame(frame, remainingSeconds);
                recordedSeconds += remainingSeconds;
            }
            // If we're going to fill the packet, interpolate the pose, send the packet,
            // and open a new one
            else
            {
                // Interpolate between the packet's last frame and our target pose
                // to compute a pose at the end of the packet time.
                OvrAvatarDriver.PoseFrame a = CurrentUnityPacket.FinalFrame;
                OvrAvatarDriver.PoseFrame b = frame;
                float t = remainingPacketSeconds / remainingSeconds;
                OvrAvatarDriver.PoseFrame intermediatePose = OvrAvatarDriver.PoseFrame.Interpolate(a, b, t);
                CurrentUnityPacket.AddFrame(intermediatePose, remainingPacketSeconds);
                recordedSeconds += remainingPacketSeconds;

                // Broadcast the recorded packet
                if (PacketRecorded != null)
                {
                    PacketRecorded(this, new PacketEventArgs(CurrentUnityPacket));
                }

                // Open a new packet
                CurrentUnityPacket = new OvrAvatarPacket(intermediatePose);
            }
        }
    }

    private void RecordSDKFrame()
    {
        if (sdkAvatar == IntPtr.Zero)
        {
            return;
        }
        if (!PacketSettings.RecordingFrames)
        {
            CAPI.ovrAvatarPacket_BeginRecording(sdkAvatar);
            PacketSettings.AccumulatedTime = 0.0f;
            PacketSettings.RecordingFrames = true;
        }
        PacketSettings.AccumulatedTime += Time.deltaTime;
        if (PacketSettings.AccumulatedTime >= PacketSettings.UpdateRate)
        {
            PacketSettings.AccumulatedTime = 0.0f;
            var packet = CAPI.ovrAvatarPacket_EndRecording(sdkAvatar);
            CAPI.ovrAvatarPacket_BeginRecording(sdkAvatar);
            if (PacketRecorded != null)
            {
                PacketRecorded(this, new PacketEventArgs(new OvrAvatarPacket { ovrNativePacket = packet }));
            }
            CAPI.ovrAvatarPacket_Free(packet);
        }
    }
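
    // Note: the native packet is freed immediately after the PacketRecorded event
    // returns, so SDK-mode handlers must serialize or copy the packet inside the
    // callback rather than holding a reference to it.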

    private void AddRenderParts(
        OvrAvatarComponent ovrComponent,
        ovrAvatarComponent component,
        Transform parent)
    {
        bool isBody = ovrComponent.name == "body";
        bool isLeftController = ovrComponent.name == "controller_left";
        bool isRightController = ovrComponent.name == "controller_right";

        for (UInt32 renderPartIndex = 0; renderPartIndex < component.renderPartCount; renderPartIndex++)
        {
            GameObject renderPartObject = new GameObject();
            renderPartObject.name = GetRenderPartName(component, renderPartIndex);
            renderPartObject.transform.SetParent(parent);
            IntPtr renderPart = GetRenderPart(component, renderPartIndex);
            ovrAvatarRenderPartType type = CAPI.ovrAvatarRenderPart_GetType(renderPart);
            OvrAvatarRenderComponent ovrRenderPart = null;
            switch (type)
            {
                case ovrAvatarRenderPartType.SkinnedMeshRender:
                    ovrRenderPart = AddSkinnedMeshRenderComponent(renderPartObject, CAPI.ovrAvatarRenderPart_GetSkinnedMeshRender(renderPart));
                    break;
                case ovrAvatarRenderPartType.SkinnedMeshRenderPBS:
                    ovrRenderPart = AddSkinnedMeshRenderPBSComponent(renderPartObject, CAPI.ovrAvatarRenderPart_GetSkinnedMeshRenderPBS(renderPart));
                    break;
                case ovrAvatarRenderPartType.SkinnedMeshRenderPBS_V2:
                {
                    ovrRenderPart = AddSkinnedMeshRenderPBSV2Component(
                        renderPart,
                        renderPartObject,
                        CAPI.ovrAvatarRenderPart_GetSkinnedMeshRenderPBSV2(renderPart),
                        isBody && renderPartIndex == 0,
                        isLeftController || isRightController);
                }
                break;
                default:
                    break;
            }
            if (ovrRenderPart != null)
            {
                ovrComponent.RenderParts.Add(ovrRenderPart);
            }
        }
    }

    public void RefreshBodyParts()
    {
        if (Body != null)
        {
            foreach (var part in Body.RenderParts)
            {
                Destroy(part.gameObject);
            }
            Body.RenderParts.Clear();
            var nativeAvatarComponent = Body.GetNativeAvatarComponent();
            if (nativeAvatarComponent.HasValue)
            {
                AddRenderParts(Body, nativeAvatarComponent.Value, Body.gameObject.transform);
            }
        }
    }

    public ovrAvatarBodyComponent? GetBodyComponent()
    {
        if (Body != null)
        {
            CAPI.ovrAvatarPose_GetBodyComponent(sdkAvatar, ref Body.component);
            return Body.component;
        }
        return null;
    }

    public Transform GetHandTransform(HandType hand, HandJoint joint)
    {
        if (hand >= HandType.Max || joint >= HandJoint.Max)
        {
            return null;
        }
        var handObject = hand == HandType.Left ? HandLeft : HandRight;
        if (handObject != null)
        {
            var avatarComponent = handObject.GetComponent<OvrAvatarComponent>();
            if (avatarComponent != null && avatarComponent.RenderParts.Count > 0)
            {
                var skinnedMesh = avatarComponent.RenderParts[0];
                return skinnedMesh.transform.Find(HandJoints[(int)hand, (int)joint]);
            }
        }
        return null;
    }

    public void GetPointingDirection(HandType hand, ref Vector3 forward, ref Vector3 up)
    {
        Transform handBase = GetHandTransform(hand, HandJoint.HandBase);
        if (handBase != null)
        {
            forward = handBase.forward;
            up = handBase.up;
        }
    }
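
    // Example usage from another script (the 'avatar' reference is hypothetical):
    //   Transform tip = avatar.GetHandTransform(OvrAvatar.HandType.Right, OvrAvatar.HandJoint.IndexTip);
    //   if (tip != null)
    //   {
    //       Debug.DrawRay(tip.position, tip.forward, Color.green); // pointer ray from the index tip
    //   }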

    static Vector3 MOUTH_POSITION_OFFSET = new Vector3(0, -0.018f, 0.1051f);
    static string VOICE_PROPERTY = "_Voice";
    static string MOUTH_POSITION_PROPERTY = "_MouthPosition";
    static string MOUTH_DIRECTION_PROPERTY = "_MouthDirection";
    static string MOUTH_SCALE_PROPERTY = "_MouthEffectScale";
    static float MOUTH_SCALE_GLOBAL = 0.007f;
    static float MOUTH_MAX_GLOBAL = 0.007f;
    static string NECK_JOINT = "root_JNT/body_JNT/chest_JNT/neckBase_JNT/neck_JNT";
    public float VoiceAmplitude = 0f;
    public bool EnableMouthVertexAnimation = false;

    private void UpdateVoiceBehavior()
    {
        if (!EnableMouthVertexAnimation)
        {
            return;
        }
        if (Body != null)
        {
            OvrAvatarComponent component = Body.GetComponent<OvrAvatarComponent>();
            VoiceAmplitude = Mathf.Clamp(VoiceAmplitude, 0f, 1f);
            if (component.RenderParts.Count > 0)
            {
                var material = component.RenderParts[0].mesh.sharedMaterial;
                var neckJoint = component.RenderParts[0].mesh.transform.Find(NECK_JOINT);
                var scaleDiff = neckJoint.TransformPoint(Vector3.up) - neckJoint.position;
                material.SetFloat(MOUTH_SCALE_PROPERTY, scaleDiff.magnitude);
                material.SetFloat(
                    VOICE_PROPERTY,
                    Mathf.Min(scaleDiff.magnitude * MOUTH_MAX_GLOBAL, scaleDiff.magnitude * VoiceAmplitude * MOUTH_SCALE_GLOBAL));
                material.SetVector(
                    MOUTH_POSITION_PROPERTY,
                    neckJoint.TransformPoint(MOUTH_POSITION_OFFSET));
                material.SetVector(MOUTH_DIRECTION_PROPERTY, neckJoint.up);
            }
        }
    }

    bool IsValidMic()
    {
        string[] devices = Microphone.devices;
        if (devices.Length < 1)
        {
            return false;
        }

        int selectedDeviceIndex = 0;
#if UNITY_STANDALONE_WIN
        // Prefer the Rift's built-in microphone when one is present.
        for (int i = 1; i < devices.Length; i++)
        {
            if (devices[i].ToUpper().Contains("RIFT"))
            {
                selectedDeviceIndex = i;
                break;
            }
        }
#endif
        string selectedDevice = devices[selectedDeviceIndex];
        int minFreq;
        int maxFreq;
        Microphone.GetDeviceCaps(selectedDevice, out minFreq, out maxFreq);
        if (maxFreq == 0)
        {
            maxFreq = 44100;
        }

        AudioClip clip = Microphone.Start(selectedDevice, true, 1, maxFreq);
        if (clip == null)
        {
            return false;
        }
        Microphone.End(selectedDevice);
        return true;
    }

    void InitPostLoad()
    {
        ExpressiveGlobalInit();
        ConfigureHelpers();
        if (GetComponent<OvrAvatarLocalDriver>() != null)
        {
            // Use mic.
            lipsyncContext.audioLoopback = false;
            if (CanOwnMicrophone && IsValidMic())
            {
                micInput = MouthAnchor.gameObject.AddComponent<OVRLipSyncMicInput>();
                micInput.enableMicSelectionGUI = false;
                micInput.MicFrequency = 44100;
                micInput.micControl = OVRLipSyncMicInput.micActivation.ConstantSpeak;
            }

            // Set lipsync animation parameters in the SDK
            CAPI.ovrAvatar_SetActionUnitOnsetSpeed(sdkAvatar, ACTION_UNIT_ONSET_SPEED);
            CAPI.ovrAvatar_SetActionUnitFalloffSpeed(sdkAvatar, ACTION_UNIT_FALLOFF_SPEED);
            CAPI.ovrAvatar_SetVisemeMultiplier(sdkAvatar, VISEME_LEVEL_MULTIPLIER);
        }
    }

    static ovrAvatarLights ovrLights = new ovrAvatarLights();

    static void ExpressiveGlobalInit()
    {
        if (doneExpressiveGlobalInit)
        {
            return;
        }
        doneExpressiveGlobalInit = true;

        // This array size has to match the 'MarshalAs' attribute in the ovrAvatarLights declaration.
        const int MAXSIZE = 16;
        ovrLights.lights = new ovrAvatarLight[MAXSIZE];
        InitializeLights();
    }

    static void InitializeLights()
    {
        // Set light info. Lights are shared across all avatar instances.
        ovrLights.ambientIntensity = RenderSettings.ambientLight.grayscale * 0.5f;
        Light[] sceneLights = FindObjectsOfType(typeof(Light)) as Light[];
        int i = 0;
        for (i = 0; i < sceneLights.Length && i < ovrLights.lights.Length; ++i)
        {
            Light sceneLight = sceneLights[i];
            if (sceneLight && sceneLight.enabled)
            {
                uint instanceID = (uint)sceneLight.transform.GetInstanceID();
                switch (sceneLight.type)
                {
                    case LightType.Directional:
                    {
                        CreateLightDirectional(instanceID, sceneLight.transform.forward, sceneLight.intensity, ref ovrLights.lights[i]);
                        break;
                    }
                    case LightType.Point:
                    {
                        CreateLightPoint(instanceID, sceneLight.transform.position, sceneLight.range, sceneLight.intensity, ref ovrLights.lights[i]);
                        break;
                    }
                    case LightType.Spot:
                    {
                        CreateLightSpot(instanceID, sceneLight.transform.position, sceneLight.transform.forward, sceneLight.spotAngle, sceneLight.range, sceneLight.intensity, ref ovrLights.lights[i]);
                        break;
                    }
                }
            }
        }
        ovrLights.lightCount = (uint)i;
        CAPI.ovrAvatar_UpdateLights(ovrLights);
    }

    static ovrAvatarLight CreateLightDirectional(uint id, Vector3 direction, float intensity, ref ovrAvatarLight light)
    {
        light.id = id;
        light.type = ovrAvatarLightType.Direction;
        light.worldDirection = new Vector3(direction.x, direction.y, -direction.z);
        light.intensity = intensity;
        return light;
    }

    static ovrAvatarLight CreateLightPoint(uint id, Vector3 position, float range, float intensity, ref ovrAvatarLight light)
    {
        light.id = id;
        light.type = ovrAvatarLightType.Point;
        light.worldPosition = new Vector3(position.x, position.y, -position.z);
        light.range = range;
        light.intensity = intensity;
        return light;
    }

    static ovrAvatarLight CreateLightSpot(uint id, Vector3 position, Vector3 direction, float spotAngleDeg, float range, float intensity, ref ovrAvatarLight light)
    {
        light.id = id;
        light.type = ovrAvatarLightType.Spot;
        light.worldPosition = new Vector3(position.x, position.y, -position.z);
        light.worldDirection = new Vector3(direction.x, direction.y, -direction.z);
        light.spotAngleDeg = spotAngleDeg;
        light.range = range;
        light.intensity = intensity;
        return light;
    }

    void UpdateExpressive()
    {
        ovrAvatarTransform baseTransform = OvrAvatar.CreateOvrAvatarTransform(transform.position, transform.rotation);
        CAPI.ovrAvatar_UpdateWorldTransform(sdkAvatar, baseTransform);
        UpdateFacewave();
    }

    private void ConfigureHelpers()
    {
        Transform head =
            transform.Find("body/body_renderPart_0/root_JNT/body_JNT/chest_JNT/neckBase_JNT/neck_JNT/head_JNT");
        if (head == null)
        {
            AvatarLogger.LogError("Avatar helper config failed. Cannot find head transform. All helpers spawning on root avatar transform");
            head = transform;
        }

        if (MouthAnchor == null)
        {
            MouthAnchor = CreateHelperObject(head, MOUTH_HEAD_OFFSET, MOUTH_HELPER_NAME);
        }

        if (GetComponent<OvrAvatarLocalDriver>() != null)
        {
            if (audioSource == null)
            {
                audioSource = MouthAnchor.gameObject.AddComponent<AudioSource>();
            }
            spatializedSource = MouthAnchor.GetComponent<ONSPAudioSource>();
            if (spatializedSource == null)
            {
                spatializedSource = MouthAnchor.gameObject.AddComponent<ONSPAudioSource>();
            }
            spatializedSource.UseInvSqr = true;
            spatializedSource.EnableRfl = false;
            spatializedSource.EnableSpatialization = true;
            spatializedSource.Far = 100f;
            spatializedSource.Near = 0.1f;

            // Add phoneme context to the mouth anchor
            lipsyncContext = MouthAnchor.GetComponent<OVRLipSyncContext>();
            if (lipsyncContext == null)
            {
                lipsyncContext = MouthAnchor.gameObject.AddComponent<OVRLipSyncContext>();
            }
            lipsyncContext.provider = EnableLaughter
                ? OVRLipSync.ContextProviders.Enhanced_with_Laughter
                : OVRLipSync.ContextProviders.Enhanced;
            // Ignore the audio callback if the microphone is owned by VoIP
            lipsyncContext.skipAudioSource = !CanOwnMicrophone;
            StartCoroutine(WaitForMouthAudioSource());
        }

        if (GetComponent<OvrAvatarRemoteDriver>() != null)
        {
            GazeTarget headTarget = head.gameObject.AddComponent<GazeTarget>();
            headTarget.Type = ovrAvatarGazeTargetType.AvatarHead;
            AvatarLogger.Log("Added head as gaze target");

            Transform hand = transform.Find("hand_left");
            if (hand == null)
            {
                AvatarLogger.LogWarning("Gaze target helper config failed: Cannot find left hand transform");
            }
            else
            {
                GazeTarget handTarget = hand.gameObject.AddComponent<GazeTarget>();
                handTarget.Type = ovrAvatarGazeTargetType.AvatarHand;
                AvatarLogger.Log("Added left hand as gaze target");
            }

            hand = transform.Find("hand_right");
            if (hand == null)
            {
                AvatarLogger.LogWarning("Gaze target helper config failed: Cannot find right hand transform");
            }
            else
            {
                GazeTarget handTarget = hand.gameObject.AddComponent<GazeTarget>();
                handTarget.Type = ovrAvatarGazeTargetType.AvatarHand;
                AvatarLogger.Log("Added right hand as gaze target");
            }
        }
    }

    private IEnumerator WaitForMouthAudioSource()
    {
        while (MouthAnchor.GetComponent<AudioSource>() == null)
        {
            yield return new WaitForSeconds(0.1f);
        }
        AudioSource source = MouthAnchor.GetComponent<AudioSource>();
        source.minDistance = 0.3f;
        source.maxDistance = 4f;
        source.rolloffMode = AudioRolloffMode.Logarithmic;
        source.loop = true;
        source.playOnAwake = true;
        source.spatialBlend = 1.0f;
        source.spatialize = true;
        source.spatializePostEffects = true;
    }

    public void DestroyHelperObjects()
    {
        if (MouthAnchor)
        {
            DestroyImmediate(MouthAnchor.gameObject);
        }
    }

    public GameObject CreateHelperObject(Transform parent, Vector3 localPositionOffset, string helperName,
        string helperTag = "")
    {
        GameObject helper = new GameObject();
        helper.name = helperName;
        if (helperTag != "")
        {
            helper.tag = helperTag;
        }
        helper.transform.SetParent(parent);
        helper.transform.localRotation = Quaternion.identity;
        helper.transform.localPosition = localPositionOffset;
        return helper;
    }

    public void UpdateVoiceData(short[] pcmData, int numChannels)
    {
        if (lipsyncContext != null && micInput == null)
        {
            lipsyncContext.ProcessAudioSamplesRaw(pcmData, numChannels);
        }
    }

    public void UpdateVoiceData(float[] pcmData, int numChannels)
    {
        if (lipsyncContext != null && micInput == null)
        {
            lipsyncContext.ProcessAudioSamplesRaw(pcmData, numChannels);
        }
    }

    private void UpdateFacewave()
    {
        if (lipsyncContext != null && (micInput != null || CanOwnMicrophone == false))
        {
            // Get the current viseme frame
            currentFrame = lipsyncContext.GetCurrentPhonemeFrame();

            // Verify the length (-1 for laughter)
            if (currentFrame.Visemes.Length != (VISEME_COUNT - 1))
            {
                Debug.LogError("Unexpected number of visemes " + currentFrame.Visemes.Length);
                return;
            }

            // Copy to the viseme array
            currentFrame.Visemes.CopyTo(visemes, 0);
            // Copy laughter as the final element
            visemes[VISEME_COUNT - 1] = EnableLaughter ? currentFrame.laughterScore : 0.0f;

            // Send visemes to the native implementation.
            for (int i = 0; i < VISEME_COUNT; i++)
            {
                RuntimeVisemes.visemeParams[i] = visemes[i];
            }
            CAPI.ovrAvatar_SetVisemes(sdkAvatar, RuntimeVisemes);
        }
    }
}
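
For reference, here is a minimal sketch of how the PacketRecorded event above can drive a second avatar, modeled on the Avatar SDK's remote-loopback sample. The AvatarPacketRelay class and its localAvatar/remoteDriver fields are hypothetical names you would wire up yourself, and it assumes OvrAvatarRemoteDriver exposes QueuePacket(int, OvrAvatarPacket) as in that sample. UseSDKPackets must be false here, because SDK-mode packets wrap a native pointer that RecordSDKFrame frees as soon as the event returns; Unity-mode packets are managed objects and stay valid.

using UnityEngine;

// Hypothetical relay: records Unity-mode packets on a local avatar and replays
// them on a remote avatar. A real app would serialize args.Packet and send it
// over the network instead of queuing it directly.
public class AvatarPacketRelay : MonoBehaviour
{
    public OvrAvatar localAvatar;              // source avatar; set in the Inspector
    public OvrAvatarRemoteDriver remoteDriver; // driver on the avatar that replays packets
    private int sequence;

    void Start()
    {
        // Must be false before OvrAvatar.Start() runs (safest to also untick
        // "Use SDK Packets" in the Inspector), so packets survive the callback.
        localAvatar.UseSDKPackets = false;
        localAvatar.RecordPackets = true;
        localAvatar.PacketRecorded += OnPacketRecorded;
    }

    void OnPacketRecorded(object sender, OvrAvatar.PacketEventArgs args)
    {
        // Hand the finished packet straight to the remote driver.
        remoteDriver.QueuePacket(sequence++, args.Packet);
    }
}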