Assignment for RMIT Mixed Reality in 2020

/************************************************************************************
Filename    :   OVRLipSync.cs
Content     :   Interface to Oculus Lip Sync engine
Created     :   August 4th, 2015
Copyright   :   Copyright Facebook Technologies, LLC and its affiliates.
                All rights reserved.

Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.

You may obtain a copy of the License at

https://developer.oculus.com/licenses/audio-3.3/

Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System;
using System.Runtime.InteropServices;

//-------------------------------------------------------------------------------------
// ***** OVRLipSync
//
/// <summary>
/// OVRLipSync interfaces into the Oculus lip sync engine. This component should be added
/// into the scene once.
///
/// </summary>
public class OVRLipSync : MonoBehaviour
{
    // Error codes that may return from Lip Sync engine
    public enum Result
    {
        Success = 0,
        Unknown = -2200,             //< An unknown error has occurred
        CannotCreateContext = -2201, //< Unable to create a context
        InvalidParam = -2202,        //< An invalid parameter, e.g. NULL pointer or out of range
        BadSampleRate = -2203,       //< An unsupported sample rate was declared
        MissingDLL = -2204,          //< The DLL or shared library could not be found
        BadVersion = -2205,          //< Mismatched versions between header and libs
        UndefinedFunction = -2206    //< An undefined function
    };

    // Audio buffer data type
    public enum AudioDataType
    {
        // Signed 16-bit integer mono audio stream
        S16_Mono,
        // Signed 16-bit integer stereo audio stream
        S16_Stereo,
        // Signed 32-bit float mono audio stream
        F32_Mono,
        // Signed 32-bit float stereo audio stream
        F32_Stereo
    };

    // Various visemes
    public enum Viseme
    {
        sil,
        PP,
        FF,
        TH,
        DD,
        kk,
        CH,
        SS,
        nn,
        RR,
        aa,
        E,
        ih,
        oh,
        ou
    };

    public static readonly int VisemeCount = Enum.GetNames(typeof(Viseme)).Length;

    // Enum for sending lip-sync engine specific signals
    public enum Signals
    {
        VisemeOn,
        VisemeOff,
        VisemeAmount,
        VisemeSmoothing,
        LaughterAmount
    };

    public static readonly int SignalCount = Enum.GetNames(typeof(Signals)).Length;

    // Enum for provider context to create
    public enum ContextProviders
    {
        Original,
        Enhanced,
        Enhanced_with_Laughter,
    };

    /// NOTE: Opaque typedef for lip-sync context is an unsigned int (uint)

    /// Current phoneme frame results
    [System.Serializable]
    public class Frame
    {
        public void CopyInput(Frame input)
        {
            frameNumber = input.frameNumber;
            frameDelay = input.frameDelay;
            input.Visemes.CopyTo(Visemes, 0);
            laughterScore = input.laughterScore;
        }

        public void Reset()
        {
            frameNumber = 0;
            frameDelay = 0;
            Array.Clear(Visemes, 0, VisemeCount);
            laughterScore = 0;
        }

        public int frameNumber;     // count from start of recognition
        public int frameDelay;      // in ms
        public float[] Visemes = new float[VisemeCount]; // Array of floats for viseme frame. Size of Viseme Count, above
        public float laughterScore; // probability of laughter presence.
    };
    // * * * * * * * * * * * * *
    // Import functions
#if !UNITY_IOS || UNITY_EDITOR
    public const string strOVRLS = "OVRLipSync";
#else
    public const string strOVRLS = "__Internal";
#endif

    [DllImport(strOVRLS)]
    private static extern int ovrLipSyncDll_Initialize(int samplerate, int buffersize);

    [DllImport(strOVRLS)]
    private static extern void ovrLipSyncDll_Shutdown();

    [DllImport(strOVRLS)]
    private static extern IntPtr ovrLipSyncDll_GetVersion(ref int Major,
                                                          ref int Minor,
                                                          ref int Patch);

    [DllImport(strOVRLS)]
    private static extern int ovrLipSyncDll_CreateContextEx(ref uint context,
                                                            ContextProviders provider,
                                                            int sampleRate,
                                                            bool enableAcceleration);

    [DllImport(strOVRLS)]
    private static extern int ovrLipSyncDll_CreateContextWithModelFile(ref uint context,
                                                                       ContextProviders provider,
                                                                       string modelPath,
                                                                       int sampleRate,
                                                                       bool enableAcceleration);

    [DllImport(strOVRLS)]
    private static extern int ovrLipSyncDll_DestroyContext(uint context);

    [DllImport(strOVRLS)]
    private static extern int ovrLipSyncDll_ResetContext(uint context);

    [DllImport(strOVRLS)]
    private static extern int ovrLipSyncDll_SendSignal(uint context,
                                                       Signals signal,
                                                       int arg1, int arg2);

    [DllImport(strOVRLS)]
    private static extern int ovrLipSyncDll_ProcessFrameEx(
        uint context,
        IntPtr audioBuffer,
        uint bufferSize,
        AudioDataType dataType,
        ref int frameNumber,
        ref int frameDelay,
        float[] visemes,
        int visemeCount,
        ref float laughterScore,
        float[] laughterCategories,
        int laughterCategoriesLength);
    // * * * * * * * * * * * * *
    // Public members
    // * * * * * * * * * * * * *
    // Static members
    private static Result sInitialized = Result.Unknown;

    // interface through this static member.
    public static OVRLipSync sInstance = null;

    // * * * * * * * * * * * * *
    // MonoBehaviour overrides

    /// <summary>
    /// Awake this instance.
    /// </summary>
    void Awake()
    {
        // We can only have one instance of OVRLipSync in a scene (use this for local property query)
        if (sInstance == null)
        {
            sInstance = this;
        }
        else
        {
            Debug.LogWarning(System.String.Format("OVRLipSync Awake: Only one instance of OVRLipSync can exist in the scene."));
            return;
        }

        if (IsInitialized() != Result.Success)
        {
            sInitialized = Initialize();

            if (sInitialized != Result.Success)
            {
                Debug.LogWarning(System.String.Format
                    ("OvrLipSync Awake: Failed to init Speech Rec library"));
            }
        }

        // Important: Use the touchpad mechanism for input, call Create on the OVRTouchpad helper class
        OVRTouchpad.Create();
    }

    /// <summary>
    /// Raises the destroy event.
    /// </summary>
    void OnDestroy()
    {
        if (sInstance != this)
        {
            Debug.LogWarning(
                "OVRLipSync OnDestroy: This is not the correct OVRLipSync instance.");
            return;
        }

        // Do not shut down at this time
        // ovrLipSyncDll_Shutdown();
        // sInitialized = (int)Result.Unknown;
    }

    // * * * * * * * * * * * * *
    // Public Functions

    public static Result Initialize()
    {
        int sampleRate;
        int bufferSize;
        int numbuf;

        // Get the current sample rate
        sampleRate = AudioSettings.outputSampleRate;
        // Get the current buffer size and number of buffers
        AudioSettings.GetDSPBufferSize(out bufferSize, out numbuf);

        String str = System.String.Format
            ("OvrLipSync Awake: Queried SampleRate: {0:F0} BufferSize: {1:F0}", sampleRate, bufferSize);
        Debug.LogWarning(str);

        sInitialized = (Result)ovrLipSyncDll_Initialize(sampleRate, bufferSize);
        return sInitialized;
    }

    public static Result Initialize(int sampleRate, int bufferSize)
    {
        String str = System.String.Format
            ("OvrLipSync Awake: Queried SampleRate: {0:F0} BufferSize: {1:F0}", sampleRate, bufferSize);
        Debug.LogWarning(str);

        sInitialized = (Result)ovrLipSyncDll_Initialize(sampleRate, bufferSize);
        return sInitialized;
    }
    public static void Shutdown()
    {
        ovrLipSyncDll_Shutdown();
        sInitialized = Result.Unknown;
    }

    /// <summary>
    /// Determines if is initialized.
    /// </summary>
    /// <returns><c>true</c> if is initialized; otherwise, <c>false</c>.</returns>
    public static Result IsInitialized()
    {
        return sInitialized;
    }

    /// <summary>
    /// Creates a lip-sync context.
    /// </summary>
    /// <returns>error code</returns>
    /// <param name="context">Context.</param>
    /// <param name="provider">Provider.</param>
    /// <param name="enableAcceleration">Enable DSP Acceleration.</param>
    public static Result CreateContext(
        ref uint context,
        ContextProviders provider,
        int sampleRate = 0,
        bool enableAcceleration = false)
    {
        if (IsInitialized() != Result.Success && Initialize() != Result.Success)
            return Result.CannotCreateContext;

        return (Result)ovrLipSyncDll_CreateContextEx(ref context, provider, sampleRate, enableAcceleration);
    }

    /// <summary>
    /// Creates a lip-sync context with specified model file.
    /// </summary>
    /// <returns>error code</returns>
    /// <param name="context">Context.</param>
    /// <param name="provider">Provider.</param>
    /// <param name="modelPath">Model Dir.</param>
    /// <param name="sampleRate">Sampling Rate.</param>
    /// <param name="enableAcceleration">Enable DSP Acceleration.</param>
    public static Result CreateContextWithModelFile(
        ref uint context,
        ContextProviders provider,
        string modelPath,
        int sampleRate = 0,
        bool enableAcceleration = false)
    {
        if (IsInitialized() != Result.Success && Initialize() != Result.Success)
            return Result.CannotCreateContext;

        return (Result)ovrLipSyncDll_CreateContextWithModelFile(
            ref context,
            provider,
            modelPath,
            sampleRate,
            enableAcceleration);
    }

    /// <summary>
    /// Destroy a lip-sync context.
    /// </summary>
    /// <returns>The context.</returns>
    /// <param name="context">Context.</param>
    public static Result DestroyContext(uint context)
    {
        if (IsInitialized() != Result.Success)
            return Result.Unknown;

        return (Result)ovrLipSyncDll_DestroyContext(context);
    }

    /// <summary>
    /// Resets the context.
    /// </summary>
    /// <returns>error code</returns>
    /// <param name="context">Context.</param>
    public static Result ResetContext(uint context)
    {
        if (IsInitialized() != Result.Success)
            return Result.Unknown;

        return (Result)ovrLipSyncDll_ResetContext(context);
    }

    /// <summary>
    /// Sends a signal to the lip-sync engine.
    /// </summary>
    /// <returns>error code</returns>
    /// <param name="context">Context.</param>
    /// <param name="signal">Signal.</param>
    /// <param name="arg1">Arg1.</param>
    /// <param name="arg2">Arg2.</param>
    public static Result SendSignal(uint context, Signals signal, int arg1, int arg2)
    {
        if (IsInitialized() != Result.Success)
            return Result.Unknown;

        return (Result)ovrLipSyncDll_SendSignal(context, signal, arg1, arg2);
    }

    /// <summary>
    /// Process float[] audio buffer by lip-sync engine.
    /// </summary>
    /// <returns>error code</returns>
    /// <param name="context">Context.</param>
    /// <param name="audioBuffer"> PCM audio buffer.</param>
    /// <param name="frame">Lip-sync Frame.</param>
    /// <param name="stereo">Whether buffer is part of stereo or mono stream.</param>
    public static Result ProcessFrame(
        uint context, float[] audioBuffer, Frame frame, bool stereo = true)
    {
        if (IsInitialized() != Result.Success)
            return Result.Unknown;

        var dataType = stereo ? AudioDataType.F32_Stereo : AudioDataType.F32_Mono;
        var numSamples = (uint)(stereo ? audioBuffer.Length / 2 : audioBuffer.Length);
        var handle = GCHandle.Alloc(audioBuffer, GCHandleType.Pinned);
        var rc = ovrLipSyncDll_ProcessFrameEx(context,
            handle.AddrOfPinnedObject(), numSamples, dataType,
            ref frame.frameNumber, ref frame.frameDelay,
            frame.Visemes, frame.Visemes.Length,
            ref frame.laughterScore,
            null, 0
        );
        handle.Free();
        return (Result)rc;
    }

    /// <summary>
    /// Process short[] audio buffer by lip-sync engine.
    /// </summary>
    /// <returns>error code</returns>
    /// <param name="context">Context.</param>
    /// <param name="audioBuffer"> PCM audio buffer.</param>
    /// <param name="frame">Lip-sync Frame.</param>
    /// <param name="stereo">Whether buffer is part of stereo or mono stream.</param>
    public static Result ProcessFrame(
        uint context, short[] audioBuffer, Frame frame, bool stereo = true)
    {
        if (IsInitialized() != Result.Success)
            return Result.Unknown;

        var dataType = stereo ? AudioDataType.S16_Stereo : AudioDataType.S16_Mono;
        var numSamples = (uint)(stereo ? audioBuffer.Length / 2 : audioBuffer.Length);
        var handle = GCHandle.Alloc(audioBuffer, GCHandleType.Pinned);
        var rc = ovrLipSyncDll_ProcessFrameEx(context,
            handle.AddrOfPinnedObject(), numSamples, dataType,
            ref frame.frameNumber, ref frame.frameDelay,
            frame.Visemes, frame.Visemes.Length,
            ref frame.laughterScore,
            null, 0
        );
        handle.Free();
        return (Result)rc;
    }
}
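
The file above exposes the native lip-sync engine through static methods: a context is created with CreateContext, PCM audio is pushed through ProcessFrame, and the resulting per-viseme weights arrive in the supplied Frame. As a rough sketch of that flow (not part of the Oculus SDK file above), the hypothetical component below creates a context at startup, feeds it whatever audio passes through the GameObject it sits on, and logs the "aa" viseme weight. The class name VisemeLogger, the use of OnAudioFilterRead (which requires an AudioSource or AudioListener on the same GameObject), and the logging are assumptions for illustration.

using UnityEngine;

// Hypothetical usage sketch for the static OVRLipSync API above; not part of the SDK.
public class VisemeLogger : MonoBehaviour
{
    private uint context = 0;                                // opaque lip-sync context handle
    private OVRLipSync.Frame frame = new OVRLipSync.Frame(); // latest viseme results

    void Start()
    {
        // Create an enhanced-provider context; CreateContext calls Initialize()
        // lazily if the engine has not been initialized yet.
        var result = OVRLipSync.CreateContext(ref context, OVRLipSync.ContextProviders.Enhanced);
        if (result != OVRLipSync.Result.Success)
            Debug.LogWarning("VisemeLogger: could not create lip-sync context: " + result);
    }

    // Unity calls this on the audio thread with interleaved float PCM samples.
    void OnAudioFilterRead(float[] data, int channels)
    {
        if (context == 0)
            return;

        // Feed the buffer to the engine; the stereo flag follows the channel count.
        OVRLipSync.ProcessFrame(context, data, frame, channels == 2);
    }

    void Update()
    {
        // frame.Visemes holds one weight per OVRLipSync.Viseme entry; a real script
        // would map these weights onto blend shapes instead of logging them.
        Debug.Log("aa viseme weight: " + frame.Visemes[(int)OVRLipSync.Viseme.aa]);
    }

    void OnDestroy()
    {
        if (context != 0)
            OVRLipSync.DestroyContext(context);
    }
}

In practice the Oculus Lip Sync package ships its own helper components that wrap this pattern; the sketch is only meant to show how the static calls in OVRLipSync.cs fit together.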