XunFeiYuYin.cs

using System;
using System.Collections;
using System.Collections.Generic;
using System.Net.WebSockets;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using UnityEngine;
using UnityEngine.Android;
using UnityEngine.Networking;
//using ZenFulcrum.EmbeddedBrowser;

public class XunFeiYuYin : MonoBehaviour
{
    public enum Voice
    {
        xiaoyan,   // Xiaoyan, sweet female voice, Mandarin
        aisjiuxu,  // Xu Jiu, friendly male voice, Mandarin
        aisxping,  // Xiaoping, mature female voice, Mandarin
        aisjinger, // Xiaojing, friendly female voice, Mandarin
        aisbabyxu, // Xu Xiaobao, cute child voice, Mandarin
        // The voices above are the basic ones. The English voices below are paid add-ons
        // and will not work unless they are enabled in the iFlytek console.
        catherine, // Catherine, English female voice
        john,      // John, English male voice
    }

    string APPID = "5c81de59";
    string APISecret = "ea4d5e9b06f8cfb0deae4d5360e7f8a7";
    string APIKey = "94348d7a6d5f3807176cb1f4923efa5c";
    public string 语音评测APIKey = "c6ea43c9e7b14d163bdeb4e51d2e564d";

    public static XunFeiYuYin yuyin;
    public event Action<string> 语音识别完成事件; // fired when speech recognition completes
    public AudioClip RecordedClip;
    ClientWebSocket 语音识别WebSocket;

    private void Awake()
    {
        if (yuyin == null)
        {
            yuyin = this;
        }
        else
        {
            Destroy(gameObject);
            return;
        }
        DontDestroyOnLoad(gameObject);
    }

    public static XunFeiYuYin Init(string appkey, string APISecret, string APIKey, string 语音评测APIKey)
    {
        string name = "讯飞语音";
        if (yuyin == null)
        {
            GameObject g = new GameObject(name);
            g.AddComponent<XunFeiYuYin>();
        }
        if (!Permission.HasUserAuthorizedPermission(Permission.Microphone))
        {
            Permission.RequestUserPermission(Permission.Microphone);
        }
        yuyin.APPID = appkey;
        yuyin.APISecret = APISecret;
        yuyin.APIKey = APIKey;
        yuyin.语音评测APIKey = 语音评测APIKey;
        //if (!yuyin.讯飞语音加) Debug.LogWarning("未安装或正确设置讯飞语音+将使用在线收费版讯飞引擎");
        //yuyin.javaClass.CallStatic("设置语音识别参数", new object[] { "language", "zh_cn" }); // set recognition language to Chinese
        return yuyin;
    }

    // Builds the authenticated WebSocket URL required by the iFlytek v2 APIs:
    // the "host date request-line" string is signed with HMAC-SHA256 using APISecret
    // and passed, base64-encoded, as the authorization query parameter.
    string GetUrl(string uriStr)
    {
        Uri uri = new Uri(uriStr);
        string date = DateTime.UtcNow.ToString("r"); // RFC1123 date; the API expects GMT, so use UtcNow
        string signature_origin = string.Format("host: {0}\ndate: {1}\nGET {2} HTTP/1.1", uri.Host, date, uri.AbsolutePath);
        HMACSHA256 mac = new HMACSHA256(Encoding.UTF8.GetBytes(APISecret));
        string signature = Convert.ToBase64String(mac.ComputeHash(Encoding.UTF8.GetBytes(signature_origin)));
        string authorization_origin = string.Format("api_key=\"{0}\",algorithm=\"hmac-sha256\",headers=\"host date request-line\",signature=\"{1}\"", APIKey, signature);
        string authorization = Convert.ToBase64String(Encoding.UTF8.GetBytes(authorization_origin));
        string url = string.Format("{0}?authorization={1}&date={2}&host={3}", uri, authorization, date, uri.Host);
        return url;
    }

    #region 语音识别 (speech recognition)
    public void 开始语音识别()
    {
        if (语音识别WebSocket != null && 语音识别WebSocket.State == WebSocketState.Open)
        {
            Debug.LogWarning("Cannot start recognition: the previous recognition connection is still open.");
            return;
        }
        连接语音识别WebSocket();
        RecordedClip = Microphone.Start(null, false, 60, 16000);
    }

    public IEnumerator 停止语音识别()
    {
        Microphone.End(null);
        yield return new WaitUntil(() => 语音识别WebSocket.State != WebSocketState.Open);
        Debug.Log("Recognition finished, recording stopped.");
    }
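
    // Illustrative usage sketch added for documentation; it is not part of the original API surface.
    // It shows one possible way to drive the recognition flow end to end. The credential strings and
    // the fixed 5-second wait are placeholders, not values taken from this project.
    private IEnumerator 示例_语音识别流程()
    {
        var yy = Init("yourAppId", "yourApiSecret", "yourApiKey", "yourIseApiKey"); // placeholder credentials
        yy.语音识别完成事件 += result => Debug.Log("Recognition result: " + result);
        yy.开始语音识别();                    // opens the WebSocket and starts the microphone
        yield return new WaitForSeconds(5f);  // speak during this window
        yield return yy.停止语音识别();        // stop recording and wait for the socket to close
    }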
    async void 连接语音识别WebSocket()
    {
        using (语音识别WebSocket = new ClientWebSocket())
        {
            CancellationToken ct = new CancellationToken();
            Uri url = new Uri(GetUrl("wss://iat-api.xfyun.cn/v2/iat"));
            await 语音识别WebSocket.ConnectAsync(url, ct);
            Debug.Log("Connected.");
            StartCoroutine(发送录音数据流(语音识别WebSocket));
            StringBuilder stringBuilder = new StringBuilder();
            while (语音识别WebSocket.State == WebSocketState.Open)
            {
                var result = new byte[4096];
                await 语音识别WebSocket.ReceiveAsync(new ArraySegment<byte>(result), ct); // receive a frame
                // Trim the trailing zero bytes left over from the fixed-size buffer.
                List<byte> list = new List<byte>(result);
                while (list.Count > 0 && list[list.Count - 1] == 0x00) list.RemoveAt(list.Count - 1);
                string str = Encoding.UTF8.GetString(list.ToArray());
                Debug.Log("Received message: " + str);
                if (string.IsNullOrEmpty(str))
                {
                    return;
                }
                识别data data = JsonUtility.FromJson<识别data>(str);
                stringBuilder.Append(获取识别单词(data));
                int status = data.data.status;
                if (status == 2) // status 2 marks the final result frame
                {
                    语音识别WebSocket.Abort();
                }
            }
            Debug.LogWarning("Connection closed.");
            string s = stringBuilder.ToString();
            if (!string.IsNullOrEmpty(s))
            {
                语音识别完成事件?.Invoke(s);
                Debug.LogWarning("Recognized speech: " + s);
            }
        }
    }

    [Serializable]
    public class 识别data
    {
        [Serializable]
        public class Data
        {
            [Serializable]
            public class Result
            {
                [Serializable]
                public class Ws
                {
                    [Serializable]
                    public class Cw
                    {
                        public string w;
                    }
                    public Cw[] cw;
                }
                public Ws[] ws;
            }
            public int status;
            public Result result;
        }
        public Data data;
    }

    string 获取识别单词(识别data data)
    {
        StringBuilder stringBuilder = new StringBuilder();
        var ws = data.data.result.ws;
        foreach (var item in ws)
        {
            var cw = item.cw;
            foreach (var w in cw)
            {
                stringBuilder.Append(w.w);
            }
        }
        return stringBuilder.ToString();
    }

    void 发送数据(byte[] audio, int status, ClientWebSocket socket)
    {
        if (socket.State != WebSocketState.Open)
        {
            return;
        }
        string audioStr = audio == null ? "" : Convert.ToBase64String(audio);
        string message = "{\"common\":{\"app_id\":\"" + APPID + "\"},\"business\":{\"language\":\"zh_cn\",\"domain\":\"iat\",\"accent\":\"mandarin\",\"vad_eos\":2000}," +
            "\"data\":{\"status\":" + status + ",\"encoding\":\"raw\",\"format\":\"audio/L16;rate=16000\",\"audio\":\"" + audioStr + "\"}}";
        Debug.Log("Sending message: " + message);
        socket.SendAsync(new ArraySegment<byte>(Encoding.UTF8.GetBytes(message)), WebSocketMessageType.Binary, true, new CancellationToken()); // fire-and-forget send
    }

    IEnumerator 发送录音数据流(ClientWebSocket socket)
    {
        yield return new WaitWhile(() => Microphone.GetPosition(null) <= 0); // wait until the microphone has produced samples
        float t = 0;
        int position = Microphone.GetPosition(null);
        const float waitTime = 0.04f; // send audio every 40 ms
        int status = 0;
        int lastPosition = 0;
        const int Maxlength = 640;    // maximum samples per frame (640 samples = 40 ms at 16 kHz)
        while (position < RecordedClip.samples && socket.State == WebSocketState.Open)
        {
            t += waitTime;
            yield return new WaitForSecondsRealtime(waitTime);
            if (Microphone.IsRecording(null)) position = Microphone.GetPosition(null);
            Debug.Log("Recording time: " + t + ", position=" + position + ", lastPosition=" + lastPosition);
            if (position <= lastPosition)
            {
                Debug.LogWarning("Audio stream fully sent; stopping.");
                break;
            }
            int length = position - lastPosition > Maxlength ? Maxlength : position - lastPosition;
            byte[] data = 获取音频流片段(lastPosition, length, RecordedClip);
            发送数据(data, status, socket);
            lastPosition = lastPosition + length;
            status = 1;
        }
        发送数据(null, 2, socket); // status 2 tells the server this was the last audio frame
        //WebSocket.CloseAsync(WebSocketCloseStatus.NormalClosure, "关闭WebSocket连接", new CancellationToken());
        Microphone.End(null);
    }

    // Converts a segment of the recorded clip into 16-bit PCM bytes (2 bytes per sample).
    public static byte[] 获取音频流片段(int start, int length, AudioClip recordedClip)
    {
        float[] soundata = new float[length];
        recordedClip.GetData(soundata, start);
        int rescaleFactor = 32767;
        byte[] outData = new byte[soundata.Length * 2];
        for (int i = 0; i < soundata.Length; i++)
        {
            short temshort = (short)(soundata[i] * rescaleFactor);
            byte[] temdata = BitConverter.GetBytes(temshort);
            outData[i * 2] = temdata[0];
            outData[i * 2 + 1] = temdata[1];
        }
        return outData;
    }
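
    // Editor's sketch, for illustration only: 获取音频流片段 and bytesToFloat are intended to be
    // near-inverses (float samples -> 16-bit PCM -> float samples), so samples rebuilt from the
    // sent bytes should match the originals to within the 16-bit quantization error. The clip
    // used here is a generated placeholder, not audio from this project.
    private static void 示例_PCM往返检查()
    {
        float[] original = { 0f, 0.25f, -0.5f, 0.99f };
        AudioClip clip = AudioClip.Create("pcm_roundtrip_demo", original.Length, 1, 16000, false);
        clip.SetData(original, 0);
        byte[] pcm = 获取音频流片段(0, original.Length, clip); // 2 bytes per sample
        float[] restored = bytesToFloat(pcm);
        for (int i = 0; i < original.Length; i++)
        {
            Debug.Log(string.Format("sample {0}: original={1:F4}, restored={2:F4}", i, original[i], restored[i]));
        }
    }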
    #endregion

    #region 语音合成 (text-to-speech)
    public AudioSource 语音合成流播放器;
    ClientWebSocket 语音合成WebSocket;
    int 语音流总长度 = 0;
    Queue<float> 播放队列 = new Queue<float>();
    Queue<string> strQueue = new Queue<string>();

    /// <summary>
    /// Starts text-to-speech synthesis.
    /// </summary>
    /// <param name="text">Text to synthesize</param>
    /// <param name="voice">Voice to use; defaults to xiaoyan</param>
    /// <param name="speed">Speaking rate, 0-100; defaults to 50</param>
    /// <param name="volume">Volume, 0-100; defaults to 50</param>
    public IEnumerator 开始语音合成(string text, Voice voice = Voice.xiaoyan, int speed = 50, int volume = 50)
    {
        if (语音合成WebSocket != null)
        {
            语音合成WebSocket.Abort();
        }
        if (语音合成流播放器 == null)
        {
            语音合成流播放器 = gameObject.AddComponent<AudioSource>();
        }
        语音合成流播放器.Stop();
        连接语音合成WebSocket(GetUrl("wss://tts-api.xfyun.cn/v2/tts"), text, voice.ToString(), speed, volume);
        //语音合成流播放器.clip = clip;
        //语音合成流播放器.Play(); // play the audio stream
        while (true)
        {
            yield return null;
            if ((语音合成流播放器 != null && !语音合成流播放器.isPlaying) || (语音合成WebSocket.State == WebSocketState.Aborted && 语音合成流播放器.timeSamples >= 语音流总长度))
            {
                HttpsSendLog.Instance.SendLog("Log", "语音流播放完毕");
                Debug.Log(text + " audio stream finished playing.");
                语音合成流播放器.Stop();
                break;
            }
        }
    }
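
    // Illustrative sketch added for documentation; the sample sentence and the speed/volume values
    // are placeholders. 开始语音合成 is a coroutine that loops until the player reports it has
    // stopped, so it has to be run through StartCoroutine rather than called directly.
    private void 示例_播放一句合成语音()
    {
        StartCoroutine(开始语音合成("你好,欢迎使用讯飞语音合成。", Voice.xiaoyan, speed: 60, volume: 80)); // placeholder text and settings
    }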
    private Queue<AudioClip> qAudioClips = new Queue<AudioClip>();

    public void stop()
    {
        isStart = false;
        if (语音合成WebSocket != null)
        {
            语音合成WebSocket.Abort();
        }
        Destroy(gameObject.GetComponent<AudioSource>());
        语音合成流播放器 = null;
        strQueue.Clear();
        播放队列.Clear();
        qAudioClips.Clear();
        accLength = 0;
    }

    private string str;

    public void startQueue(string text)
    {
        //if (qAudioClips.Count > 0)
        //    qAudioClips.Clear();
        //qAudioClips = new Queue<AudioClip>();
        ERNIEBotManager.Instance.isAudioPlay = true;
        //for (int i = 0; i < text.Length; i++)
        //{
        //    str += text[i];
        //    if ((i + 1) % 80 == 0)
        //    {
        //        strQueue.Enqueue(str);
        //        str = "";
        //    }
        //}
        //if (str != "")
        //{
        //    strQueue.Enqueue(str);
        //    str = "";
        //}
        strQueue.Enqueue(text);
    }

    bool isStart = false;
    float times = 0;
    float accLength = 0;

    private void Update()
    {
        if (!isStart && strQueue.Count > 0)
        {
            times = 0;
            Debug.Log("Starting synthesis; clips currently queued: " + qAudioClips.Count);
            isStart = true;
            string audio = strQueue.Dequeue();
            accLength += audio.Length;
            StartCoroutine(开始语音合成(audio, Voice.xiaoyan));
        }
        if (语音合成流播放器 != null && qAudioClips.Count > 0 && !语音合成流播放器.isPlaying)
        {
            语音合成流播放器.clip = qAudioClips.Dequeue();
            语音合成流播放器.Play();
            //StartCoroutine(AudioPlayFinish(语音合成流播放器.clip.length));
            Debug.Log("Starting playback; clips remaining: " + qAudioClips.Count);
        }
        if (ERNIEBotManager.Instance.isAudioPlay && 语音合成流播放器 != null && 语音合成流播放器.clip != null && qAudioClips.Count < 1)
        {
            ERNIEBotManager.Instance.isAudioPlay = false;
        }
        if (qAudioClips.Count > 0)
        {
            ERNIEBotManager.Instance.isAudioPlay = true;
        }
    }

    private IEnumerator AudioPlayFinish(float times)
    {
        Debug.Log("语音合成流播放器 " + times);
        yield return new WaitForSeconds(times);
        语音合成流播放器.clip = null;
    }

    void OnAudioRead(float[] data)
    {
        Debug.Log("OnAudioRead: " + data.Length + ", 播放队列 count: " + 播放队列.Count);
        for (int i = 0; i < data.Length; i++)
        {
            if (播放队列.Count > 0)
            {
                data[i] = 播放队列.Dequeue();
            }
            else
            {
                if (语音合成WebSocket == null || 语音合成WebSocket.State != WebSocketState.Aborted) 语音流总长度++;
                data[i] = 0;
            }
        }
    }

    [Serializable]
    public class 合成data
    {
        [Serializable]
        public class Data
        {
            public int status;
            public string audio;
        }
        public Data data;
    }

    public async void 连接语音合成WebSocket(string urlStr, string text, string voice, int speed, int volume)
    {
        Debug.Log("连接语音合成WebSocket starting; clips queued: " + qAudioClips.Count);
        //ServicePointManager.SecurityProtocol = SecurityProtocolType.Ssl3;
        using (语音合成WebSocket = new ClientWebSocket())
        {
            CancellationToken ct = new CancellationToken();
            Uri url = new Uri(urlStr);
            await 语音合成WebSocket.ConnectAsync(url, ct);
            text = Convert.ToBase64String(Encoding.UTF8.GetBytes(text));
            string message = "{\"common\":{\"app_id\":\"" + APPID + "\"},\"business\":{\"vcn\":\"" + voice + "\",\"aue\":\"raw\",\"speed\":" + speed + ",\"volume\":" + volume + ",\"tte\":\"UTF8\"}," +
                "\"data\":{\"status\":2,\"text\":\"" + text + "\"}}";
            Debug.Log("Sending message: " + message);
            Debug.Log("Connected.");
            await 语音合成WebSocket.SendAsync(new ArraySegment<byte>(Encoding.UTF8.GetBytes(message)), WebSocketMessageType.Binary, true, ct); // send the synthesis request
            StringBuilder sb = new StringBuilder();
            //播放队列.Clear();
            while (语音合成WebSocket.State == WebSocketState.Open)
            {
                var result = new byte[4096];
                await 语音合成WebSocket.ReceiveAsync(new ArraySegment<byte>(result), ct); // receive a frame
                // Trim the trailing zero bytes left over from the fixed-size buffer.
                List<byte> list = new List<byte>(result);
                while (list.Count > 0 && list[list.Count - 1] == 0x00) list.RemoveAt(list.Count - 1);
                var str = Encoding.UTF8.GetString(list.ToArray());
                Debug.Log(str);
                sb.Append(str);
                if (str.EndsWith("}}")) // a message may span several receives; "}}" marks the end of a complete JSON payload
                {
                    合成data.Data data = JsonUtility.FromJson<合成data>(sb.ToString()).data;
                    Debug.Log("Received complete JSON: " + sb);
                    sb.Clear();
                    int status = data.status;
                    float[] fs = bytesToFloat(Convert.FromBase64String(data.audio));
                    Debug.Log("Decoded sample count: " + fs.Length);
                    AudioClip clip = AudioClip.Create("语音合成流" + qAudioClips.Count, fs.Length, 1, 16000, false); // the player's maximum playback length is one minute
                    clip.SetData(fs, 0);
                    qAudioClips.Enqueue(clip);
                    语音流总长度 += fs.Length;
                    //foreach (float f in fs) 播放队列.Enqueue(f);
                    if (status == 2) // status 2 marks the final audio frame
                    {
                        isStart = false;
                        语音合成WebSocket.Abort();
                        Debug.Log("连接语音合成WebSocket closed; clips queued: " + qAudioClips.Count);
                        break;
                    }
                }
                Debug.Log(" Test " + text.Length + " Audio " + 语音流总长度 + " " + (语音流总长度 + 12800 + Mathf.Abs((text.Length - 20) * 1024)));
                HttpsSendLog.Instance.SendLog("Log", " Test " + text.Length + " Audio " + 语音流总长度 + " " + (语音流总长度 + 12800 + Mathf.Abs((text.Length - 20) * 1024)));
            }
        }
    }

    // Converts a byte[] of 16-bit PCM into the float[] samples an AudioClip can read.
    public static float[] bytesToFloat(byte[] byteArray)
    {
        float[] sounddata = new float[byteArray.Length / 2];
        for (int i = 0; i < sounddata.Length; i++)
        {
            sounddata[i] = bytesToFloat(byteArray[i * 2], byteArray[i * 2 + 1]);
        }
        return sounddata;
    }

    static float bytesToFloat(byte firstByte, byte secondByte)
    {
        // Combine two bytes into one short; the byte order depends on platform endianness.
        short s;
        if (BitConverter.IsLittleEndian)
            s = (short)((secondByte << 8) | firstByte);
        else
            s = (short)((firstByte << 8) | secondByte);
        // Scale to the -1..1 range used by AudioClip samples.
        return s / 32768.0F;
    }
    #endregion

    #region 语音评测 (pronunciation evaluation)
    AudioClip 语音评测录音;

    public AudioClip 开始语音评测录音()
    {
        语音评测录音 = Microphone.Start(null, false, 30, 16000); // start recording, 30 seconds max
        return 语音评测录音;
    }

    public void 停止语音评测查看结果(string text, Action<string> 评测结果回调, AudioClip clip = null)
    {
        if (clip == null)
        {
            clip = 语音评测录音;
        }
        byte[] data = 获取真实大小录音(clip);
        Microphone.End(null);
        StartCoroutine(语音评测(data, text, 评测结果回调));
    }
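
    // Illustrative sketch added for documentation: one possible evaluation round-trip using the
    // two methods above. The English sentence and the fixed wait are placeholders; the callback
    // receives the raw response body returned by the ISE endpoint.
    private IEnumerator 示例_语音评测流程()
    {
        开始语音评测录音();                    // start recording (30 seconds max)
        yield return new WaitForSeconds(5f);   // read the sentence aloud during this window
        停止语音评测查看结果("Nice to meet you", result => Debug.Log("Evaluation result: " + result));
    }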
    // Sends the recorded audio and the reference text to the iFlytek ISE HTTP endpoint for pronunciation scoring.
    public IEnumerator 语音评测(byte[] audioData, string 评测文本, Action<string> 评测结果回调)
    {
        string param = "{\"aue\":\"raw\",\"result_level\":\"entirety\",\"language\":\"en_us\",\"category\":\"read_sentence\"}";
        byte[] bytedata = Encoding.UTF8.GetBytes(param);
        string x_param = Convert.ToBase64String(bytedata);
        TimeSpan ts = DateTime.UtcNow - new DateTime(1970, 1, 1, 0, 0, 0, 0);
        string curTime = Convert.ToInt64(ts.TotalSeconds).ToString();
        string checksum = string.Format("{0}{1}{2}", 语音评测APIKey, curTime, x_param);
        string x_checksum = Md5(checksum);
        string cc = Convert.ToBase64String(audioData);
        string Url = "https://api.xfyun.cn/v1/service/v1/ise";
        WWWForm form = new WWWForm();
        form.AddField("audio", cc);
        form.AddField("text", 评测文本);
        UnityWebRequest request = UnityWebRequest.Post(Url, form);
        request.SetRequestHeader("X-Appid", APPID);
        request.SetRequestHeader("X-CurTime", curTime);
        request.SetRequestHeader("X-Param", x_param);
        request.SetRequestHeader("X-Checksum", x_checksum);
        yield return request.SendWebRequest();
        if (request.isNetworkError || request.isHttpError)
        {
            Debug.LogError("Network error, please check the connection. err: " + request.error);
        }
        else
        {
            评测结果回调(request.downloadHandler.text);
        }
    }

    // Trims the recording to the samples actually captured and converts them to 16-bit PCM bytes.
    public static byte[] 获取真实大小录音(ref AudioClip recordedClip)
    {
        int position = Microphone.GetPosition(null);
        if (position <= 0 || position > recordedClip.samples)
        {
            position = recordedClip.samples;
        }
        float[] soundata = new float[position * recordedClip.channels];
        recordedClip.GetData(soundata, 0);
        recordedClip = AudioClip.Create(recordedClip.name, position, recordedClip.channels, recordedClip.frequency, false);
        recordedClip.SetData(soundata, 0);
        return 获取音频流片段(0, position * recordedClip.channels, recordedClip);
    }

    public static byte[] 获取真实大小录音(AudioClip recordedClip)
    {
        int position = Microphone.GetPosition(null);
        if (position <= 0 || position > recordedClip.samples)
        {
            position = recordedClip.samples;
        }
        return 获取音频流片段(0, position * recordedClip.channels, recordedClip);
    }

    public static string Md5(string s)
    {
        MD5 md5 = new MD5CryptoServiceProvider();
        byte[] bytes = Encoding.UTF8.GetBytes(s);
        bytes = md5.ComputeHash(bytes);
        md5.Clear();
        string ret = "";
        for (int i = 0; i < bytes.Length; i++)
        {
            ret += Convert.ToString(bytes[i], 16).PadLeft(2, '0');
        }
        return ret.PadLeft(32, '0');
    }
    #endregion
}