TextDetectionExample.cs

#if !UNITY_WSA_10_0

using UnityEngine;
using UnityEngine.SceneManagement;
using System;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.TextModule;
using OpenCVForUnity.ImgcodecsModule;
using OpenCVForUnity.UnityUtils;

namespace OpenCVForUnityExample
{
    /// <summary>
    /// Text Detection Example
    /// A demo script of the Extremal Region Filter algorithm described in: Neumann L., Matas J.: Real-Time Scene Text Localization and Recognition, CVPR 2012.
    /// Referring to https://github.com/opencv/opencv_contrib/blob/master/modules/text/samples/textdetection.py.
    /// </summary>
    public class TextDetectionExample : MonoBehaviour
    {
        /// <summary>
        /// The image file name.
        /// </summary>
        protected static readonly string IMAGE_FILENAME = "OpenCVForUnity/text/scenetext01.jpg";

        /// <summary>
        /// The image filepath.
        /// </summary>
        string image_filepath;

        /// <summary>
        /// The trained_classifierNM1 file name.
        /// </summary>
        protected static readonly string TRAINED_CLASSIFIER_NM_1_FILENAME = "OpenCVForUnity/text/trained_classifierNM1.xml";

        /// <summary>
        /// The trained_classifierNM1 filepath.
        /// </summary>
        string trained_classifierNM1_filepath;

        /// <summary>
        /// The trained_classifierNM2 file name.
        /// </summary>
        protected static readonly string TRAINED_CLASSIFIER_NM_2_FILENAME = "OpenCVForUnity/text/trained_classifierNM2.xml";

        /// <summary>
        /// The trained_classifierNM2 filepath.
        /// </summary>
        string trained_classifierNM2_filepath;
#if UNITY_WEBGL
        IEnumerator getFilePath_Coroutine;
#endif

        // Use this for initialization
        void Start()
        {
#if UNITY_WEBGL
            getFilePath_Coroutine = GetFilePath();
            StartCoroutine(getFilePath_Coroutine);
#else
            image_filepath = Utils.getFilePath(IMAGE_FILENAME);
            trained_classifierNM1_filepath = Utils.getFilePath(TRAINED_CLASSIFIER_NM_1_FILENAME);
            trained_classifierNM2_filepath = Utils.getFilePath(TRAINED_CLASSIFIER_NM_2_FILENAME);
            Run();
#endif
        }
#if UNITY_WEBGL
        private IEnumerator GetFilePath()
        {
            var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync(IMAGE_FILENAME, (result) => {
                image_filepath = result;
            });
            yield return getFilePathAsync_0_Coroutine;

            var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync(TRAINED_CLASSIFIER_NM_1_FILENAME, (result) => {
                trained_classifierNM1_filepath = result;
            });
            yield return getFilePathAsync_1_Coroutine;

            var getFilePathAsync_2_Coroutine = Utils.getFilePathAsync(TRAINED_CLASSIFIER_NM_2_FILENAME, (result) => {
                trained_classifierNM2_filepath = result;
            });
            yield return getFilePathAsync_2_Coroutine;

            getFilePath_Coroutine = null;

            Run();
        }
#endif
        private void Run()
        {
            // If true, error logs from the native OpenCV side are displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image_filepath);
            if (img.empty())
            {
                Debug.LogError(IMAGE_FILENAME + " is not loaded. Please move it from the “OpenCVForUnity/StreamingAssets/OpenCVForUnity/” folder to the “Assets/StreamingAssets/OpenCVForUnity/” folder.");
            }

            if (string.IsNullOrEmpty(trained_classifierNM1_filepath) || string.IsNullOrEmpty(trained_classifierNM2_filepath))
            {
                Debug.LogError(TRAINED_CLASSIFIER_NM_1_FILENAME + " or " + TRAINED_CLASSIFIER_NM_2_FILENAME + " is not loaded. Please move them from the “OpenCVForUnity/StreamingAssets/OpenCVForUnity/” folder to the “Assets/StreamingAssets/OpenCVForUnity/” folder.");
            }
            //# for visualization
            Mat vis = new Mat();
            img.copyTo(vis);
            // imread loads the image in BGR order, so convert the visualization copy to RGB for display in Unity.
            Imgproc.cvtColor(vis, vis, Imgproc.COLOR_BGR2RGB);

            //# Extract channels to be processed individually
            List<Mat> channels = new List<Mat>();
            Text.computeNMChannels(img, channels);
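            // With the default mode (ERFILTER_NM_RGBLGrad) this typically produces 5 channels:
            // red, green, blue, lightness, and gradient magnitude.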
            //# Append negative channels to detect ER- (bright regions over dark background)
            int cn = channels.Count;
            for (int i = 0; i < cn; i++)
            {
                channels.Add(new Scalar(255) - channels[i]);
            }
            //# Apply the default cascade classifier to each independent channel (could be done in parallel)
            Debug.Log("Extracting Class Specific Extremal Regions from " + channels.Count + " channels ...");
            Debug.Log(" (...) this may take a while (...)");
            foreach (var channel in channels)
            {
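                // First-stage filter (NM1): thresholds the channel in steps of thresholdDelta (16) and keeps
                // extremal regions whose relative area lies between minArea (0.00015) and maxArea (0.13),
                // whose estimated probability is at least minProbability (0.2), with non-maximum suppression
                // enabled and a minimum probability difference of 0.1.
                // Second-stage filter (NM2): re-classifies the surviving regions and rejects those scoring
                // below minProbability (0.5). Parameter meanings follow the OpenCV text module documentation.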
                ERFilter er1 = Text.createERFilterNM1(trained_classifierNM1_filepath, 16, 0.00015f, 0.13f, 0.2f, true, 0.1f);
                ERFilter er2 = Text.createERFilterNM2(trained_classifierNM2_filepath, 0.5f);

                List<MatOfPoint> regions = new List<MatOfPoint>();
                Text.detectRegions(channel, er1, er2, regions);

                MatOfRect matOfRects = new MatOfRect();
                Text.erGrouping(img, channel, regions, matOfRects);
                // Text.erGrouping (img, channel, regions, matOfRects, Text.ERGROUPING_ORIENTATION_ANY, Utils.getFilePath ("text/trained_classifier_erGrouping.xml"), 0.5f);
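                // The overload used here groups regions with the default horizontally-aligned strategy
                // (ERGROUPING_ORIENTATION_HORIZ). The commented-out call above would instead use
                // ERGROUPING_ORIENTATION_ANY, which additionally requires the trained_classifier_erGrouping.xml
                // classifier and a minimum grouping probability.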
                List<OpenCVForUnity.CoreModule.Rect> rects = matOfRects.toList();

                //# Visualization
                foreach (var rect in rects)
                {
                    Imgproc.rectangle(vis, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 0, 0), 2);
                    Imgproc.rectangle(vis, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 255, 255), 1);
                }
            }
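            // Display the result: matToTexture2D copies the RGB visualization Mat into the RGBA32 texture,
            // which is then assigned to this GameObject's material.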
            Texture2D texture = new Texture2D(vis.cols(), vis.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(vis, texture);
            gameObject.GetComponent<Renderer>().material.mainTexture = texture;

            Utils.setDebugMode(false);
        }
        // Update is called once per frame
        void Update()
        {
        }

        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy()
        {
#if UNITY_WEBGL
            if (getFilePath_Coroutine != null)
            {
                StopCoroutine(getFilePath_Coroutine);
                ((IDisposable)getFilePath_Coroutine).Dispose();
            }
#endif
        }

        /// <summary>
        /// Raises the back button click event.
        /// </summary>
        public void OnBackButtonClick()
        {
            SceneManager.LoadScene("OpenCVForUnityExample");
        }
    }
}
#endif