AudioFrameObserverNative.cs

using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
using AOT;
#endif

namespace Agora.Rtc
{
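    // Static bridge between the native Iris audio-frame callbacks and the managed
    // IAudioFrameObserver stored in the AudioFrameObserver field. Incoming frames are
    // marshalled into cached AudioFrame instances so that no managed objects are
    // allocated on the audio callback path.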
    internal static class AudioFrameObserverNative
    {
        // INTPTR exposes only the native buffer pointer; RAW_DATA additionally copies
        // the PCM data into AudioFrame.RawBuffer on every callback.
        internal static OBSERVER_MODE mode = OBSERVER_MODE.INTPTR;
        internal static IAudioFrameObserver AudioFrameObserver;
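        // Reusable frame and parameter containers: one each for the local record,
        // playback and mixed frames, plus per-(channelId, uid) caches for the
        // before-mixing callbacks.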
        private static class LocalAudioFrames
        {
            internal static AudioFrame RecordAudioFrame = new AudioFrame();
            internal static AudioFrame PlaybackAudioFrame = new AudioFrame();
            internal static AudioFrame MixedAudioFrame = new AudioFrame();
            internal static Dictionary<string, Dictionary<uint, AudioFrame>> AudioFrameBeforeMixingEx =
                new Dictionary<string, Dictionary<uint, AudioFrame>>();
            internal static Dictionary<string, Dictionary<string, AudioFrame>> AudioFrameBeforeMixingEx2 =
                new Dictionary<string, Dictionary<string, AudioFrame>>();
            internal static IrisAudioParams irisAudioParams = new IrisAudioParams();
        }
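        // Marshals the native IrisAudioFrame at audioFramePtr into the cached AudioFrame
        // for this source. An empty channelId selects a local frame (uid 0 = record,
        // 1 = playback, 2 = mixed); otherwise the frame belongs to the remote user
        // identified by (channelId, uid).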
        private static AudioFrame ProcessAudioFrameReceived(IntPtr audioFramePtr, string channelId, uint uid)
        {
            var audioFrame = (IrisAudioFrame)(Marshal.PtrToStructure(audioFramePtr, typeof(IrisAudioFrame)) ??
                                              new IrisAudioFrame());
            AudioFrame localAudioFrame = null;

            if (channelId == "")
            {
                switch (uid)
                {
                    // Local audio frame
                    case 0:
                        localAudioFrame = LocalAudioFrames.RecordAudioFrame;
                        break;
                    case 1:
                        localAudioFrame = LocalAudioFrames.PlaybackAudioFrame;
                        break;
                    case 2:
                        localAudioFrame = LocalAudioFrames.MixedAudioFrame;
                        break;
                }
            }
            else
            {
                // Remote audio frame
                if (!LocalAudioFrames.AudioFrameBeforeMixingEx.ContainsKey(channelId))
                {
                    LocalAudioFrames.AudioFrameBeforeMixingEx[channelId] = new Dictionary<uint, AudioFrame>();
                    LocalAudioFrames.AudioFrameBeforeMixingEx[channelId][uid] = new AudioFrame();
                }
                else if (!LocalAudioFrames.AudioFrameBeforeMixingEx[channelId].ContainsKey(uid))
                {
                    LocalAudioFrames.AudioFrameBeforeMixingEx[channelId][uid] = new AudioFrame();
                }
                localAudioFrame = LocalAudioFrames.AudioFrameBeforeMixingEx[channelId][uid];
            }

            if (mode == OBSERVER_MODE.RAW_DATA)
            {
                // Reallocate the managed copy only when the frame format changes.
                if (localAudioFrame.channels != audioFrame.channels ||
                    localAudioFrame.samplesPerChannel != audioFrame.samples ||
                    localAudioFrame.bytesPerSample != (BYTES_PER_SAMPLE)audioFrame.bytes_per_sample)
                {
                    localAudioFrame.RawBuffer = new byte[audioFrame.buffer_length];
                }
                if (audioFrame.buffer != IntPtr.Zero)
                    Marshal.Copy(audioFrame.buffer, localAudioFrame.RawBuffer, 0, (int)audioFrame.buffer_length);
            }

            localAudioFrame.type = audioFrame.type;
            localAudioFrame.samplesPerChannel = audioFrame.samples;
            localAudioFrame.bufferPtr = audioFrame.buffer;
            localAudioFrame.bytesPerSample = (BYTES_PER_SAMPLE)audioFrame.bytes_per_sample;
            localAudioFrame.channels = audioFrame.channels;
            localAudioFrame.samplesPerSec = audioFrame.samples_per_sec;
            localAudioFrame.renderTimeMs = audioFrame.render_time_ms;
            localAudioFrame.avsync_type = audioFrame.av_sync_type;
            return localAudioFrame;
        }
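        // String-uid variant of the above, used by OnPlaybackAudioFrameBeforeMixing2,
        // which identifies the remote user by a string uid and uses the string-keyed cache.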
        private static AudioFrame ProcessAudioFrameReceived(IntPtr audioFramePtr, string channelId, string uid)
        {
            var audioFrame = (IrisAudioFrame)(Marshal.PtrToStructure(audioFramePtr, typeof(IrisAudioFrame)) ??
                                              new IrisAudioFrame());
            AudioFrame localAudioFrame = null;

            // Remote audio frame
            if (!LocalAudioFrames.AudioFrameBeforeMixingEx2.ContainsKey(channelId))
            {
                LocalAudioFrames.AudioFrameBeforeMixingEx2[channelId] = new Dictionary<string, AudioFrame>();
                LocalAudioFrames.AudioFrameBeforeMixingEx2[channelId][uid] = new AudioFrame();
            }
            else if (!LocalAudioFrames.AudioFrameBeforeMixingEx2[channelId].ContainsKey(uid))
            {
                LocalAudioFrames.AudioFrameBeforeMixingEx2[channelId][uid] = new AudioFrame();
            }
            localAudioFrame = LocalAudioFrames.AudioFrameBeforeMixingEx2[channelId][uid];

            if (mode == OBSERVER_MODE.RAW_DATA)
            {
                // Reallocate the managed copy only when the frame format changes.
                if (localAudioFrame.channels != audioFrame.channels ||
                    localAudioFrame.samplesPerChannel != audioFrame.samples ||
                    localAudioFrame.bytesPerSample != (BYTES_PER_SAMPLE)audioFrame.bytes_per_sample)
                {
                    localAudioFrame.RawBuffer = new byte[audioFrame.buffer_length];
                }
                if (audioFrame.buffer != IntPtr.Zero)
                    Marshal.Copy(audioFrame.buffer, localAudioFrame.RawBuffer, 0, (int)audioFrame.buffer_length);
            }

            localAudioFrame.type = audioFrame.type;
            localAudioFrame.samplesPerChannel = audioFrame.samples;
            localAudioFrame.bufferPtr = audioFrame.buffer;
            localAudioFrame.bytesPerSample = (BYTES_PER_SAMPLE)audioFrame.bytes_per_sample;
            localAudioFrame.channels = audioFrame.channels;
            localAudioFrame.samplesPerSec = audioFrame.samples_per_sec;
            localAudioFrame.renderTimeMs = audioFrame.render_time_ms;
            localAudioFrame.avsync_type = audioFrame.av_sync_type;
            return localAudioFrame;
        }
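        // Copies the observer-supplied AudioParams into the cached IrisAudioParams
        // struct that is handed back to the native layer.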
        private static IrisAudioParams ProcessAudioParams(AudioParams audioParams)
        {
            LocalAudioFrames.irisAudioParams.sample_rate = audioParams.sample_rate;
            LocalAudioFrames.irisAudioParams.channels = audioParams.channels;
            LocalAudioFrames.irisAudioParams.mode = (IRIS_RAW_AUDIO_FRAME_OP_MODE_TYPE)audioParams.mode;
            LocalAudioFrames.irisAudioParams.samples_per_call = audioParams.samples_per_call;
            return LocalAudioFrames.irisAudioParams;
        }
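        // The methods below are invoked directly from the native Iris layer, so they must
        // be static and, on AOT/IL2CPP platforms, carry [MonoPInvokeCallback]. When no
        // observer is registered, or the observer throws, each callback falls back to a
        // permissive default (true, AUDIO_FRAME_POSITION_NONE, or the cached params).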
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
        [MonoPInvokeCallback(typeof(Func_AudioFrameLocal_Native))]
#endif
        internal static bool OnRecordAudioFrame(string channelId, IntPtr audioFramePtr)
        {
            if (AudioFrameObserver == null)
                return true;

            var audioFrame = ProcessAudioFrameReceived(audioFramePtr, "", 0);
            try
            {
                return AudioFrameObserver.OnRecordAudioFrame(channelId, audioFrame);
            }
            catch (Exception e)
            {
                AgoraLog.LogError("[Exception] IAudioFrameObserver.OnRecordAudioFrame: " + e);
                return true;
            }
        }
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
        [MonoPInvokeCallback(typeof(Func_AudioFrameLocal_Native))]
#endif
        internal static bool OnPlaybackAudioFrame(string channelId, IntPtr audioFramePtr)
        {
            if (AudioFrameObserver == null)
                return true;

            var audioFrame = ProcessAudioFrameReceived(audioFramePtr, "", 1);
            try
            {
                return AudioFrameObserver.OnPlaybackAudioFrame(channelId, audioFrame);
            }
            catch (Exception e)
            {
                AgoraLog.LogError("[Exception] IAudioFrameObserver.OnPlaybackAudioFrame: " + e);
                return true;
            }
        }
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
        [MonoPInvokeCallback(typeof(Func_AudioFrameLocal_Native))]
#endif
        internal static bool OnMixedAudioFrame(string channelId, IntPtr audioFramePtr)
        {
            if (AudioFrameObserver == null)
                return true;

            var audioFrame = ProcessAudioFrameReceived(audioFramePtr, "", 2);
            try
            {
                return AudioFrameObserver.OnMixedAudioFrame(channelId, audioFrame);
            }
            catch (Exception e)
            {
                AgoraLog.LogError("[Exception] IAudioFrameObserver.OnMixedAudioFrame: " + e);
                return true;
            }
        }
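        // Format/position queries: the native layer asks which frame positions should be
        // observed and which sample rate, channel count and mode the observer wants.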
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
        [MonoPInvokeCallback(typeof(Func_AudioFramePosition_Native))]
#endif
        internal static int GetObservedAudioFramePosition()
        {
            if (AudioFrameObserver == null)
                return (int)AUDIO_FRAME_POSITION.AUDIO_FRAME_POSITION_NONE;

            try
            {
                return AudioFrameObserver.GetObservedAudioFramePosition();
            }
            catch (Exception e)
            {
                AgoraLog.LogError("[Exception] IAudioFrameObserver.GetObservedAudioFramePosition: " + e);
                return (int)AUDIO_FRAME_POSITION.AUDIO_FRAME_POSITION_NONE;
            }
        }
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
        [MonoPInvokeCallback(typeof(Func_AudioParams_Native))]
#endif
        internal static IrisAudioParams GetPlaybackAudioParams()
        {
            if (AudioFrameObserver == null)
                return LocalAudioFrames.irisAudioParams;

            try
            {
                return ProcessAudioParams(AudioFrameObserver.GetPlaybackAudioParams());
            }
            catch (Exception e)
            {
                AgoraLog.LogError("[Exception] IAudioFrameObserver.GetPlaybackAudioParams: " + e);
                return LocalAudioFrames.irisAudioParams;
            }
        }
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
        [MonoPInvokeCallback(typeof(Func_AudioParams_Native))]
#endif
        internal static IrisAudioParams GetRecordAudioParams()
        {
            if (AudioFrameObserver == null)
                return LocalAudioFrames.irisAudioParams;

            try
            {
                return ProcessAudioParams(AudioFrameObserver.GetRecordAudioParams());
            }
            catch (Exception e)
            {
                AgoraLog.LogError("[Exception] IAudioFrameObserver.GetRecordAudioParams: " + e);
                return LocalAudioFrames.irisAudioParams;
            }
        }
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
        [MonoPInvokeCallback(typeof(Func_AudioParams_Native))]
#endif
        internal static IrisAudioParams GetMixedAudioParams()
        {
            if (AudioFrameObserver == null)
                return LocalAudioFrames.irisAudioParams;

            try
            {
                return ProcessAudioParams(AudioFrameObserver.GetMixedAudioParams());
            }
            catch (Exception e)
            {
                AgoraLog.LogError("[Exception] IAudioFrameObserver.GetMixedAudioParams: " + e);
                return LocalAudioFrames.irisAudioParams;
            }
        }
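        // Per-remote-user frames delivered before mixing. The "2" variant receives the
        // remote user as a string uid and reuses the string-keyed cache.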
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
        [MonoPInvokeCallback(typeof(Func_AudioFrameRemote_Native))]
#endif
        internal static bool OnPlaybackAudioFrameBeforeMixing(string channelId, uint uid, IntPtr audioFramePtr)
        {
            if (AudioFrameObserver == null)
                return true;

            var audioFrame = ProcessAudioFrameReceived(audioFramePtr, channelId, uid);
            try
            {
                return AudioFrameObserver.OnPlaybackAudioFrameBeforeMixing(channelId, uid, audioFrame);
            }
            catch (Exception e)
            {
                AgoraLog.LogError("[Exception] IAudioFrameObserver.OnPlaybackAudioFrameBeforeMixing: " + e);
                return true;
            }
        }
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
        [MonoPInvokeCallback(typeof(Func_AudioFrameRemoteStringUid_Native))]
#endif
        internal static bool OnPlaybackAudioFrameBeforeMixing2(string channelId, string uid, IntPtr audioFramePtr)
        {
            if (AudioFrameObserver == null)
                return true;

            var audioFrame = ProcessAudioFrameReceived(audioFramePtr, channelId, uid);
            try
            {
                return AudioFrameObserver.OnPlaybackAudioFrameBeforeMixing(channelId, uid, audioFrame);
            }
            catch (Exception e)
            {
                AgoraLog.LogError("[Exception] IAudioFrameObserver.OnPlaybackAudioFrameBeforeMixing2: " + e);
                return true;
            }
        }
    }
}
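
// A minimal consumer sketch, for illustration only. It assumes IAudioFrameObserver
// exposes overridable methods matching the calls made above; the exact base type and
// the registration API that assigns AudioFrameObserverNative.AudioFrameObserver are
// not shown in this file.
//
//     internal class PcmTapObserver : IAudioFrameObserver
//     {
//         public override bool OnRecordAudioFrame(string channelId, AudioFrame frame)
//         {
//             // In OBSERVER_MODE.RAW_DATA, frame.RawBuffer holds a managed copy of the
//             // PCM data; in INTPTR mode, read from frame.bufferPtr instead.
//             return true;
//         }
//     }
//
//     // Wiring performed by this bridge's owner when an observer is registered:
//     AudioFrameObserverNative.AudioFrameObserver = new PcmTapObserver();
//     AudioFrameObserverNative.mode = OBSERVER_MODE.RAW_DATA;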