// VideoStreamSender.cs
  1. using Unity.WebRTC;
  2. using UnityEngine;
  3. using System;
  4. using System.Collections;
  5. using System.Collections.Generic;
  6. using System.Linq;
  7. using UnityEngine.Rendering;
  8. using UnityEngine.Experimental.Rendering;
  9. namespace Unity.RenderStreaming
  10. {
    // Marker attributes for serialized fields; presumably consumed by custom
    // inspector property drawers in the editor assembly -- TODO confirm.
    internal sealed class StreamingSizeAttribute : PropertyAttribute { }
    internal sealed class FrameRateAttribute : PropertyAttribute { }
    // Carries the slider bounds for the bitrate range drawer.
    internal sealed class BitrateAttribute : PropertyAttribute
    {
        public int minValue;
        public int maxValue;
        public BitrateAttribute(int minValue, int maxValue)
        {
            this.minValue = minValue;
            this.maxValue = maxValue;
        }
    }
    internal sealed class RenderTextureAntiAliasingAttribute : PropertyAttribute { }
    internal sealed class RenderTextureDepthBufferAttribute : PropertyAttribute { }
    internal sealed class WebCamDeviceAttribute : PropertyAttribute { }
    internal sealed class ScaleResolutionAttribute : PropertyAttribute { }
    // Serializable unsigned min/max pair, used below for the bitrate range.
    [Serializable]
    internal struct Range
    {
        public uint min;
        public uint max;
        public Range(uint min, uint max) { this.min = min; this.max = max; }
    }
    internal sealed class CodecAttribute : PropertyAttribute { }
  35. internal static class RTCRtpSenderExtension
  36. {
  37. public static RTCError SetFrameRate(this RTCRtpSender sender, uint framerate)
  38. {
  39. if (sender.Track.Kind != TrackKind.Video)
  40. throw new ArgumentException();
  41. RTCRtpSendParameters parameters = sender.GetParameters();
  42. foreach (var encoding in parameters.encodings)
  43. {
  44. encoding.maxFramerate = framerate;
  45. }
  46. return sender.SetParameters(parameters);
  47. }
  48. public static RTCError SetScaleResolutionDown(this RTCRtpSender sender, double? scaleFactor)
  49. {
  50. if (sender.Track.Kind != TrackKind.Video)
  51. throw new ArgumentException();
  52. RTCRtpSendParameters parameters = sender.GetParameters();
  53. foreach (var encoding in parameters.encodings)
  54. {
  55. encoding.scaleResolutionDownBy = scaleFactor;
  56. }
  57. return sender.SetParameters(parameters);
  58. }
  59. public static RTCError SetBitrate(this RTCRtpSender sender, uint? minBitrate, uint? maxBitrate)
  60. {
  61. RTCRtpSendParameters parameters = sender.GetParameters();
  62. foreach (var encoding in parameters.encodings)
  63. {
  64. encoding.minBitrate = minBitrate * 1000;
  65. encoding.maxBitrate = maxBitrate * 1000;
  66. }
  67. return sender.SetParameters(parameters);
  68. }
  69. }
    /// <summary>
    /// The kind of input captured as the video source for streaming.
    /// </summary>
    public enum VideoStreamSource
    {
        /// <summary>
        /// Capture the image rendered by a <see cref="UnityEngine.Camera"/>.
        /// </summary>
        Camera = 0,
        /// <summary>
        /// Capture the screen (game view).
        /// </summary>
        Screen = 1,
        /// <summary>
        /// Capture frames from a web camera device via <see cref="UnityEngine.WebCamTexture"/>.
        /// </summary>
        WebCamera = 2,
        /// <summary>
        /// Stream an arbitrary <see cref="UnityEngine.Texture"/>.
        /// </summary>
        Texture = 3
    }
    /// <summary>
    /// Component that captures video from a camera, the screen, a web camera device
    /// or a texture and sends it over a WebRTC video track.
    /// </summary>
    [AddComponentMenu("Render Streaming/Video Stream Sender")]
    public class VideoStreamSender : StreamSenderBase
    {
        static readonly float s_defaultFrameRate = 30;
        static readonly uint s_defaultMinBitrate = 0;
        // Default maximum bitrate in kbps (values are multiplied by 1000 when applied to encodings).
        static readonly uint s_defaultMaxBitrate = 1000;
        // Default depth-buffer bits for created render textures.
        static readonly int s_defaultDepth = 16;
        //todo(kazuki): remove this value.
        // Size in pixels of the streamed texture.
        [SerializeField, StreamingSize]
        private Vector2Int m_TextureSize = new Vector2Int(1280, 720);
        // Which capture path to use; see VideoStreamSource.
        [SerializeField]
        private VideoStreamSource m_Source;
        // Used when m_Source == VideoStreamSource.Camera.
        [SerializeField]
        private Camera m_Camera;
        // Used when m_Source == VideoStreamSource.Texture.
        [SerializeField]
        private Texture m_Texture;
        // Index into WebCamTexture.devices; used when m_Source == VideoStreamSource.WebCamera.
        [SerializeField, WebCamDevice]
        private int m_WebCamDeviceIndex;
        // Depth-buffer bits for render textures created by the capture sources.
        [SerializeField, RenderTextureDepthBuffer]
        private int m_Depth = s_defaultDepth;
        // Anti-aliasing sample count for created render textures.
        [SerializeField, RenderTextureAntiAliasing]
        private int m_AntiAliasing = 1;
        [SerializeField, Codec]
        private VideoCodecInfo m_Codec;
        [SerializeField, FrameRate]
        private float m_FrameRate = s_defaultFrameRate;
        // Min/max bitrate in kbps.
        [SerializeField, Bitrate(0, 10000)]
        private Range m_Bitrate = new Range(s_defaultMinBitrate, s_defaultMaxBitrate);
        [SerializeField, ScaleResolution]
        private float m_ScaleFactor = 1f;
        // When true, webcam authorization is requested automatically before track creation.
        [SerializeField]
        private bool m_AutoRequestUserAuthorization = true;
        // The active capture source; created per stream and disposed when the stream stops.
        private VideoStreamSourceImpl m_sourceImpl = null;
        /// <summary>
        /// The type of input used as the video source.
        /// Changing the value while streaming recreates the track and replaces the one in use.
        /// </summary>
        public VideoStreamSource source
        {
            get { return m_Source; }
            set
            {
                if (m_Source == value)
                    return;
                m_Source = value;
                if (!isPlaying)
                    return;
                var op = CreateTrack();
                StartCoroutineWithCallback(op, _ => ReplaceTrack(_.Track));
            }
        }
        /// <summary>
        /// The camera captured when <see cref="source"/> is <see cref="VideoStreamSource.Camera"/>.
        /// Changing the value while streaming with the Camera source replaces the track.
        /// </summary>
        public Camera sourceCamera
        {
            get { return m_Camera; }
            set
            {
                if (m_Camera == value)
                    return;
                m_Camera = value;
                if (!isPlaying || m_Source != VideoStreamSource.Camera)
                    return;
                var op = CreateTrack();
                StartCoroutineWithCallback(op, _ => ReplaceTrack(_.Track));
            }
        }
        /// <summary>
        /// The texture streamed when <see cref="source"/> is <see cref="VideoStreamSource.Texture"/>.
        /// Changing the value while streaming with the Texture source replaces the track.
        /// </summary>
        public Texture sourceTexture
        {
            get { return m_Texture; }
            set
            {
                if (m_Texture == value)
                    return;
                m_Texture = value;
                if (!isPlaying || m_Source != VideoStreamSource.Texture)
                    return;
                var op = CreateTrack();
                StartCoroutineWithCallback(op, _ => ReplaceTrack(_.Track));
            }
        }
        /// <summary>
        /// The index of WebCamTexture.devices.
        /// Changing the value while streaming with the WebCamera source replaces the track.
        /// </summary>
        public int sourceDeviceIndex
        {
            get { return m_WebCamDeviceIndex; }
            set
            {
                if (m_WebCamDeviceIndex == value)
                    return;
                m_WebCamDeviceIndex = value;
                if (!isPlaying || m_Source != VideoStreamSource.WebCamera)
                    return;
                var op = CreateTrack();
                StartCoroutineWithCallback(op, _ => ReplaceTrack(_.Track));
            }
        }
        /// <summary>
        /// The WebCamTexture currently in use, or null when the active source
        /// is not a web camera.
        /// </summary>
        public WebCamTexture sourceWebCamTexture
        {
            get
            {
                if(m_sourceImpl is VideoStreamSourceWebCam source)
                {
                    return source.webCamTexture;
                }
                return null;
            }
        }
        /// <summary>
        /// The sending frame rate in frames per second. Use <see cref="SetFrameRate"/> to change it.
        /// </summary>
        public float frameRate
        {
            get { return m_FrameRate; }
        }
        /// <summary>
        /// The minimum bitrate in kbps. Use <see cref="SetBitrate"/> to change it.
        /// </summary>
        public uint minBitrate
        {
            get { return m_Bitrate.min; }
        }
        /// <summary>
        /// The maximum bitrate in kbps. Use <see cref="SetBitrate"/> to change it.
        /// </summary>
        public uint maxBitrate
        {
            get { return m_Bitrate.max; }
        }
        /// <summary>
        /// The factor by which the resolution is scaled down before encoding.
        /// Use <see cref="SetScaleResolutionDown"/> to change it.
        /// </summary>
        public float scaleResolutionDown
        {
            get { return m_ScaleFactor; }
        }
        /// <summary>
        /// Width in pixels of the streamed texture; setting it recreates the track via <see cref="SetTextureSize"/>.
        /// </summary>
        public uint width
        {
            get { return (uint)m_TextureSize.x; }
            set
            {
                SetTextureSize(new Vector2Int((int)value, m_TextureSize.y));
            }
        }
        /// <summary>
        /// Height in pixels of the streamed texture; setting it recreates the track via <see cref="SetTextureSize"/>.
        /// </summary>
        public uint height
        {
            get { return (uint)m_TextureSize.y; }
            set
            {
                SetTextureSize(new Vector2Int(m_TextureSize.x, (int)value));
            }
        }
        /// <summary>
        /// The preferred video codec. Use <see cref="SetCodec"/> to change it.
        /// </summary>
        public VideoCodecInfo codec
        {
            get { return m_Codec; }
        }
        /// <summary>
        /// Whether webcam authorization is requested automatically before creating a WebCamera track.
        /// </summary>
        public bool autoRequestUserAuthorization
        {
            get => m_AutoRequestUserAuthorization;
            set { m_AutoRequestUserAuthorization = value; }
        }
        /// <summary>
        /// Sets the preferred video codec for sending. May only be called before streaming starts.
        /// </summary>
        /// <param name="codec">The codec to prefer, e.g. one returned by <see cref="GetAvailableCodecs"/>.</param>
        /// <exception cref="InvalidOperationException">
        /// Thrown when called while streaming, or when applying the preference to a transceiver fails.
        /// </exception>
        public void SetCodec(VideoCodecInfo codec)
        {
            if (isPlaying)
                throw new InvalidOperationException("Can not change this parameter after the streaming is started.");
            m_Codec = codec;
            foreach (var transceiver in Transceivers.Values)
            {
                // NOTE(review): transceivers that already have a mid are skipped --
                // presumably because codec preferences only take effect before negotiation; confirm intended.
                if (!string.IsNullOrEmpty(transceiver.Mid))
                    continue;
                // Skip transceivers whose track has already ended.
                if (transceiver.Sender.Track.ReadyState == TrackState.Ended)
                    continue;
                var codecs = new[] { m_Codec };
                RTCErrorType error = transceiver.SetCodecPreferences(SelectCodecCapabilities(codecs).ToArray());
                if (error != RTCErrorType.None)
                    throw new InvalidOperationException($"Set codec is failed. errorCode={error}");
            }
        }
  296. /// <summary>
  297. ///
  298. /// </summary>
  299. /// <returns></returns>
  300. public static IEnumerable<VideoCodecInfo> GetAvailableCodecs()
  301. {
  302. string[] excludeCodecMimeType = { "video/red", "video/ulpfec", "video/rtx", "video/flexfec-03" };
  303. var capabilities = RTCRtpSender.GetCapabilities(TrackKind.Video);
  304. return capabilities.codecs.Where(codec => !excludeCodecMimeType.Contains(codec.mimeType)).Select(codec => VideoCodecInfo.Create(codec));
  305. }
  306. /// <summary>
  307. ///
  308. /// </summary>
  309. /// <param name="frameRate"></param>
  310. public void SetFrameRate(float frameRate)
  311. {
  312. if (frameRate < 0)
  313. throw new ArgumentOutOfRangeException("frameRate", frameRate, "The parameter must be greater than zero.");
  314. m_FrameRate = frameRate;
  315. foreach (var transceiver in Transceivers.Values)
  316. {
  317. RTCError error = transceiver.Sender.SetFrameRate((uint)m_FrameRate);
  318. if (error.errorType != RTCErrorType.None)
  319. throw new InvalidOperationException($"Set framerate is failed. {error.message}");
  320. }
  321. }
  322. /// <summary>
  323. ///
  324. /// </summary>
  325. /// <param name="bitrate"></param>
  326. public void SetBitrate(uint minBitrate, uint maxBitrate)
  327. {
  328. if (minBitrate > maxBitrate)
  329. throw new ArgumentException("The maxBitrate must be greater than minBitrate.", "maxBitrate");
  330. m_Bitrate.min = minBitrate;
  331. m_Bitrate.max = maxBitrate;
  332. foreach (var transceiver in Transceivers.Values)
  333. {
  334. RTCError error = transceiver.Sender.SetBitrate(m_Bitrate.min, m_Bitrate.max);
  335. if (error.errorType != RTCErrorType.None)
  336. throw new InvalidOperationException($"Set codec is failed. {error.message}");
  337. }
  338. }
  339. /// <summary>
  340. ///
  341. /// </summary>
  342. /// <param name="scaleFactor">The parameter must be greater than 1.0f.</param>
  343. public void SetScaleResolutionDown(float scaleFactor)
  344. {
  345. if (scaleFactor < 1.0f)
  346. throw new ArgumentOutOfRangeException("scaleFactor", scaleFactor, "The parameter must be greater than 1.0f. Scaleup is not allowed.");
  347. m_ScaleFactor = scaleFactor;
  348. foreach (var transceiver in Transceivers.Values)
  349. {
  350. double? value = Mathf.Approximately(m_ScaleFactor, 1) ? (double?)null : m_ScaleFactor;
  351. RTCError error = transceiver.Sender.SetScaleResolutionDown(value);
  352. if (error.errorType != RTCErrorType.None)
  353. throw new InvalidOperationException($"Set codec is failed. {error.message}");
  354. }
  355. }
        /// <summary>
        /// Sets the size of the streamed texture. While streaming, the track is
        /// recreated with the new size and swapped in.
        /// </summary>
        /// <param name="size">Width and height in pixels.</param>
        public void SetTextureSize(Vector2Int size)
        {
            m_TextureSize = size;
            if (!isPlaying)
                return;
            var op = CreateTrack();
            StartCoroutineWithCallback(op, _ => ReplaceTrack(_.Track));
        }
        private protected virtual void Awake()
        {
            // Release the capture source whenever a stream stops.
            OnStoppedStream += _OnStoppedStream;
        }
        private protected override void OnDestroy()
        {
            base.OnDestroy();
            // Dispose the active capture source (textures, coroutines, webcam) with the component.
            m_sourceImpl?.Dispose();
            m_sourceImpl = null;
        }
        // Handler for OnStoppedStream: releases the active capture source.
        void _OnStoppedStream(string connectionId)
        {
            m_sourceImpl?.Dispose();
            m_sourceImpl = null;
        }
        // Builds a fresh capture source for the current settings and returns the
        // (possibly asynchronous) track-creation instruction. Any previous source is disposed first.
        internal override WaitForCreateTrack CreateTrack()
        {
            m_sourceImpl?.Dispose();
            m_sourceImpl = CreateVideoStreamSource();
            return m_sourceImpl.CreateTrack();
        }
  389. VideoStreamSourceImpl CreateVideoStreamSource()
  390. {
  391. switch (m_Source)
  392. {
  393. case VideoStreamSource.Camera:
  394. return new VideoStreamSourceCamera(this);
  395. case VideoStreamSource.Screen:
  396. return new VideoStreamSourceScreen(this);
  397. case VideoStreamSource.Texture:
  398. return new VideoStreamSourceTexture(this);
  399. case VideoStreamSource.WebCamera:
  400. return new VideoStreamSourceWebCam(this);
  401. }
  402. throw new InvalidOperationException("");
  403. }
        // Base class for the concrete capture sources. Snapshots the sender's
        // texture size and render-texture settings at construction time.
        abstract class VideoStreamSourceImpl : IDisposable
        {
            public VideoStreamSourceImpl(VideoStreamSender parent)
            {
                width = (int)parent.width;
                height = (int)parent.height;
                depth = parent.m_Depth;
                antiAliasing = parent.m_AntiAliasing;
            }
            // Creates the VideoStreamTrack for this source (possibly asynchronously).
            public abstract WaitForCreateTrack CreateTrack();
            public abstract void Dispose();
            public int width { get; private set; }
            public int height { get; private set; }
            // Depth-buffer bits for render textures created by this source.
            public int depth { get; private set; }
            // Anti-aliasing sample count for render textures created by this source.
            public int antiAliasing { get; private set; }
        }
        // Captures the image rendered by a Unity Camera through its targetTexture.
        class VideoStreamSourceCamera : VideoStreamSourceImpl
        {
            private Camera m_camera;
            private RenderTexture m_renderTexture;
            public VideoStreamSourceCamera(VideoStreamSender parent) : base(parent)
            {
                Camera camera = parent.m_Camera;
                if (camera == null)
                    throw new ArgumentNullException("camera", "The sourceCamera is not assigned.");
                m_camera = camera;
            }
            public override WaitForCreateTrack CreateTrack()
            {
                if (m_camera.targetTexture != null)
                {
                    // Reuse the camera's existing target texture, converting its color
                    // format to one supported by unity.webrtc when necessary.
                    m_renderTexture = m_camera.targetTexture;
                    RenderTextureFormat supportFormat = WebRTC.WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
                    GraphicsFormat graphicsFormat = GraphicsFormatUtility.GetGraphicsFormat(supportFormat, RenderTextureReadWrite.Default);
                    GraphicsFormat compatibleFormat = SystemInfo.GetCompatibleFormat(graphicsFormat, FormatUsage.Render);
                    GraphicsFormat format = graphicsFormat == compatibleFormat ? graphicsFormat : compatibleFormat;
                    if (m_renderTexture.graphicsFormat != format)
                    {
                        Debug.LogWarning(
                            $"This color format:{m_renderTexture.graphicsFormat} not support in unity.webrtc. Change to supported color format:{format}.");
                        // Recreate the texture in place with the supported format.
                        m_renderTexture.Release();
                        m_renderTexture.graphicsFormat = format;
                        m_renderTexture.Create();
                    }
                    m_camera.targetTexture = m_renderTexture;
                }
                else
                {
                    // No target texture assigned: create one using the sender's size/depth/AA settings.
                    RenderTextureFormat format = WebRTC.WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
                    m_renderTexture = new RenderTexture((int)width, (int)height, depth, format)
                    {
                        antiAliasing = antiAliasing
                    };
                    m_renderTexture.Create();
                    m_camera.targetTexture = m_renderTexture;
                }
                var instruction = new WaitForCreateTrack();
                instruction.Done(new VideoStreamTrack(m_renderTexture));
                return instruction;
            }
            public override void Dispose()
            {
                // m_renderTexture doubles as the "already disposed" marker.
                if (m_renderTexture == null)
                    return;
                m_camera.targetTexture = null;
                m_renderTexture.Release();
                Destroy(m_renderTexture);
                m_renderTexture = null;
                GC.SuppressFinalize(this);
            }
            // NOTE(review): the finalizer runs on the GC thread, where calling Unity APIs
            // (targetTexture, Destroy) is generally unsafe -- relies on Dispose() being called first.
            ~VideoStreamSourceCamera()
            {
                Dispose();
            }
        }
        // Streams an arbitrary texture; when the format or size differs from what
        // unity.webrtc supports, a converted copy is refreshed every frame.
        class VideoStreamSourceTexture : VideoStreamSourceImpl
        {
            Texture m_texture;
            Texture m_copyTexture;
            private Coroutine m_coroutineScreenCapture;
            private MonoBehaviour m_behaviour;
            public VideoStreamSourceTexture(VideoStreamSender parent) : base(parent)
            {
                Texture texture = parent.m_Texture;
                if (texture == null)
                    throw new ArgumentNullException("texture", "The sourceTexture is not assigned.");
                m_texture = texture;
                // The parent component hosts the conversion coroutine.
                m_behaviour = parent;
            }
            public override WaitForCreateTrack CreateTrack()
            {
                var instruction = new WaitForCreateTrack();
                GraphicsFormat format =
                    WebRTC.WebRTC.GetSupportedGraphicsFormat(SystemInfo.graphicsDeviceType);
                if (m_texture.graphicsFormat == format && m_texture.width == width && m_texture.height == height)
                {
                    // Fast path: the texture can be streamed as-is.
                    instruction.Done(new VideoStreamTrack(m_texture));
                    return instruction;
                }
                // Otherwise stream a copy that RecordScreenFrame refreshes every frame.
                m_copyTexture = new Texture2D(width, height, format, TextureCreationFlags.None);
                m_coroutineScreenCapture = m_behaviour.StartCoroutine(RecordScreenFrame());
                instruction.Done(new VideoStreamTrack(m_copyTexture));
                return instruction;
            }
            public override void Dispose()
            {
                if (m_copyTexture != null)
                {
                    Destroy(m_copyTexture);
                    m_copyTexture = null;
                }
                if (m_coroutineScreenCapture != null)
                {
                    m_behaviour.StopCoroutine(m_coroutineScreenCapture);
                    m_behaviour = null;
                    m_coroutineScreenCapture = null;
                }
                GC.SuppressFinalize(this);
            }
            // NOTE(review): finalizer calls Unity APIs from the GC thread; relies on Dispose() being called first.
            ~VideoStreamSourceTexture()
            {
                Dispose();
            }
            // Converts the source texture into the streamed copy at the end of every frame.
            IEnumerator RecordScreenFrame()
            {
                while (true)
                {
                    yield return new WaitForEndOfFrame();
                    Graphics.ConvertTexture(m_texture, m_copyTexture);
                }
            }
        }
        // Captures the whole screen (game view) each frame and streams a converted,
        // sender-sized copy.
        class VideoStreamSourceScreen : VideoStreamSourceImpl, IDisposable
        {
            private RenderTexture m_screenTexture;
            private Texture m_screenCopyTexture;
            private Coroutine m_coroutineScreenCapture;
            private MonoBehaviour m_behaviour;
            public VideoStreamSourceScreen(VideoStreamSender parent) : base(parent)
            {
                // The parent component hosts the capture coroutine.
                m_behaviour = parent;
            }
            static Vector2Int GetScreenSize()
            {
                /// Screen.width/height returns size of the active window.
                /// However, it is mandatory to get size of the game view when player mode.
                /// UnityStats is used here because it returns the size of game view anytime.
#if UNITY_EDITOR
                string[] screenres = UnityEditor.UnityStats.screenRes.Split('x');
                int screenWidth = int.Parse(screenres[0]);
                int screenHeight = int.Parse(screenres[1]);
                /// Set Screen.width/height forcely because UnityStats returns zero when batch mode.
                if (screenWidth == 0 || screenHeight == 0)
                {
                    screenWidth = Screen.width;
                    screenHeight = Screen.height;
                }
#else
                int screenWidth = Screen.width;
                int screenHeight = Screen.height;
#endif
                return new Vector2Int(screenWidth, screenHeight);
            }
            public override WaitForCreateTrack CreateTrack()
            {
                // A screen-sized render texture receives the raw capture...
                Vector2Int screenSize = GetScreenSize();
                m_screenTexture =
                    new RenderTexture(screenSize.x, screenSize.y, depth, RenderTextureFormat.Default) { antiAliasing = antiAliasing };
                m_screenTexture.Create();
                // ...and is converted every frame into a sender-sized copy in a supported format.
                GraphicsFormat format = WebRTC.WebRTC.GetSupportedGraphicsFormat(SystemInfo.graphicsDeviceType);
                m_screenCopyTexture = new Texture2D(width, height, format, TextureCreationFlags.None);
                // The texture obtained by ScreenCapture.CaptureScreenshotIntoRenderTexture is different between OpenGL and other Graphics APIs.
                // In OpenGL, we got a texture that is not inverted, so need flip when sending.
                var isOpenGl = SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLCore ||
                    SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES2 ||
                    SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3;
                m_coroutineScreenCapture = m_behaviour.StartCoroutine(RecordScreenFrame());
                var instruction = new WaitForCreateTrack();
                instruction.Done(new VideoStreamTrack(m_screenCopyTexture, isOpenGl));
                return instruction;
            }
            // Captures the screen and converts it into the streamed copy at the end of every frame.
            IEnumerator RecordScreenFrame()
            {
                while (true)
                {
                    yield return new WaitForEndOfFrame();
                    ScreenCapture.CaptureScreenshotIntoRenderTexture(m_screenTexture);
                    Graphics.ConvertTexture(m_screenTexture, m_screenCopyTexture);
                }
            }
            public override void Dispose()
            {
                // m_screenTexture doubles as the "already disposed" marker.
                if (m_screenTexture == null)
                    return;
                m_screenTexture.Release();
                Destroy(m_screenTexture);
                m_screenTexture = null;
                Destroy(m_screenCopyTexture);
                m_screenCopyTexture = null;
                m_behaviour.StopCoroutine(m_coroutineScreenCapture);
                m_behaviour = null;
                m_coroutineScreenCapture = null;
                GC.SuppressFinalize(this);
            }
            // NOTE(review): finalizer calls Unity APIs from the GC thread; relies on Dispose() being called first.
            ~VideoStreamSourceScreen()
            {
                Dispose();
            }
        }
        // Captures frames from a WebCamTexture device, optionally requesting user
        // authorization first; track creation is asynchronous.
        class VideoStreamSourceWebCam : VideoStreamSourceImpl, IDisposable
        {
            int m_deviceIndex;
            bool m_autoRequestUserAuthorization;
            float m_frameRate;
            WebCamTexture m_webcamTexture;
            // Converted copy used only when the webcam's graphics format is unsupported.
            Texture m_webcamCopyTexture;
            VideoStreamSender m_parent;
            Coroutine m_coroutineConvertFrame;
            // Exposed so VideoStreamSender.sourceWebCamTexture can surface the active device texture.
            public WebCamTexture webCamTexture => m_webcamTexture;
            public VideoStreamSourceWebCam(VideoStreamSender parent) : base(parent)
            {
                int deviceIndex = parent.m_WebCamDeviceIndex;
                if (deviceIndex < 0 || WebCamTexture.devices.Length <= deviceIndex)
                    throw new ArgumentOutOfRangeException("deviceIndex", deviceIndex, "The deviceIndex is out of range");
                m_parent = parent;
                m_deviceIndex = deviceIndex;
                m_frameRate = parent.m_FrameRate;
                m_autoRequestUserAuthorization = parent.m_AutoRequestUserAuthorization;
            }
            public override WaitForCreateTrack CreateTrack()
            {
                // Authorization and the first webcam frame must be awaited, so the
                // instruction is completed from a coroutine.
                var instruction = new WaitForCreateTrack();
                m_parent.StartCoroutine(CreateTrackCoroutine(instruction));
                return instruction;
            }
            IEnumerator CreateTrackCoroutine(WaitForCreateTrack instruction)
            {
                if (m_autoRequestUserAuthorization)
                {
                    AsyncOperation op = Application.RequestUserAuthorization(UserAuthorization.WebCam);
                    yield return op;
                }
                if (!Application.HasUserAuthorization(UserAuthorization.WebCam))
                    throw new InvalidOperationException("Call Application.RequestUserAuthorization before creating track with WebCam.");
                WebCamDevice userCameraDevice = WebCamTexture.devices[m_deviceIndex];
                m_webcamTexture = new WebCamTexture(userCameraDevice.name, width, height, (int)m_frameRate);
                m_webcamTexture.Play();
                // Wait for the first frame; the device may report an adjusted resolution afterwards.
                yield return new WaitUntil(() => m_webcamTexture.didUpdateThisFrame);
                if (m_webcamTexture.width != width || m_webcamTexture.height != height)
                {
                    Destroy(m_webcamTexture);
                    m_webcamTexture = null;
                    throw new InvalidOperationException($"The device doesn't support the resolution. {width} x {height}");
                }
                /// Convert texture if the graphicsFormat is not supported.
                /// Since Unity 2022.1, WebCamTexture.graphicsFormat returns R8G8B8A8_SRGB on Android Vulkan.
                /// WebRTC doesn't support the graphics format when using Vulkan, and throw exception.
                var supportedFormat = WebRTC.WebRTC.GetSupportedGraphicsFormat(SystemInfo.graphicsDeviceType);
                if (m_webcamTexture.graphicsFormat != supportedFormat)
                {
                    m_webcamCopyTexture = new Texture2D(width, height, supportedFormat, TextureCreationFlags.None);
                    instruction.Done(new VideoStreamTrack(m_webcamCopyTexture));
                    m_coroutineConvertFrame = m_parent.StartCoroutine(ConvertFrame());
                }
                else
                {
                    instruction.Done(new VideoStreamTrack(m_webcamTexture));
                }
            }
            // Converts each webcam frame into the supported-format copy at the end of every frame.
            IEnumerator ConvertFrame()
            {
                while (true)
                {
                    yield return new WaitForEndOfFrame();
                    Graphics.ConvertTexture(m_webcamTexture, m_webcamCopyTexture);
                }
            }
            public override void Dispose()
            {
                if(m_coroutineConvertFrame != null)
                {
                    m_parent.StopCoroutine(m_coroutineConvertFrame);
                    m_parent = null;
                    Destroy(m_webcamCopyTexture);
                    m_webcamCopyTexture = null;
                }
                // m_webcamTexture doubles as the "already disposed" marker.
                if (m_webcamTexture == null)
                    return;
                m_webcamTexture.Stop();
                Destroy(m_webcamTexture);
                m_webcamTexture = null;
                GC.SuppressFinalize(this);
            }
            // NOTE(review): finalizer calls Unity APIs from the GC thread; relies on Dispose() being called first.
            ~VideoStreamSourceWebCam()
            {
                Dispose();
            }
        }
        // Maps the given VideoCodecInfo entries onto the matching native sender codec capabilities.
        internal IEnumerable<RTCRtpCodecCapability> SelectCodecCapabilities(IEnumerable<VideoCodecInfo> codecs)
        {
            return RTCRtpSender.GetCapabilities(TrackKind.Video).SelectCodecCapabilities(codecs);
        }
  706. }
  707. }