using UnityEngine;
using UnityEngine.EventSystems;
using UnityEngine.SceneManagement;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.VideoModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;

namespace OpenCVForUnityExample
{
    /// <summary>
    /// CamShift Example
    /// An example of object tracking using the Video.CamShift function.
    /// Referring to http://www.computervisiononline.com/blog/tutorial-using-camshift-track-objects-video.
    /// http://docs.opencv.org/3.2.0/db/df8/tutorial_py_meanshift.html
    /// </summary>
    [RequireComponent(typeof(WebCamTextureToMatHelper))]
    public class CamShiftExample : MonoBehaviour
    {
        /// <summary>
        /// The texture.
        /// </summary>
        Texture2D texture;

        /// <summary>
        /// The roi point list.
        /// </summary>
        List<Point> roiPointList;

        /// <summary>
        /// The roi rect.
        /// </summary>
        OpenCVForUnity.CoreModule.Rect roiRect;

        /// <summary>
        /// The hsv mat.
        /// </summary>
        Mat hsvMat;

        /// <summary>
        /// The roi hist mat.
        /// </summary>
        Mat roiHistMat;

        /// <summary>
        /// The termination criteria for CamShift.
        /// </summary>
        TermCriteria termination;

        /// <summary>
        /// The webcam texture to mat helper.
        /// </summary>
        WebCamTextureToMatHelper webCamTextureToMatHelper;

        /// <summary>
        /// The stored touch point.
        /// </summary>
        Point storedTouchPoint;

        /// <summary>
        /// The flag for requesting the start of the CamShift method.
        /// </summary>
        bool shouldStartCamShift = false;

        /// <summary>
        /// The FPS monitor.
        /// </summary>
        FpsMonitor fpsMonitor;

        // Use this for initialization
        void Start()
        {
            fpsMonitor = GetComponent<FpsMonitor>();

            roiPointList = new List<Point>();
            termination = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);

            webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();

#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs only on some Android devices (e.g. Google Pixel, Pixel 2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(webCamTextureMat, texture);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", webCamTextureMat.width().ToString());
                fpsMonitor.Add("height", webCamTextureMat.height().ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
                fpsMonitor.consoleText = "Please touch the 4 points surrounding the tracking object.";
            }

            // Fit the orthographic camera to the texture quad.
            float width = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            hsvMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
        }

        /// <summary>
        /// Raises the webcam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            if (hsvMat != null)
                hsvMat.Dispose();
            if (roiHistMat != null)
                roiHistMat.Dispose();
            roiPointList.Clear();

            if (texture != null)
            {
                Texture2D.Destroy(texture);
                texture = null;
            }
        }

        /// <summary>
        /// Raises the webcam texture to mat helper error occurred event.
        /// </summary>
        /// <param name="errorCode">Error code.</param>
        public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
        {
            Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
        }
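        // Per-frame CamShift flow (Update below):
        //  1. Convert the RGBA camera frame to HSV; the hue channel (channel 0) drives the histogram.
        //  2. Once 4 ROI points have been selected (shouldStartCamShift), compute and normalize the
        //     hue histogram of their bounding rectangle.
        //  3. On later frames, back-project that histogram onto the frame and let Video.CamShift
        //     update roiRect; the resulting rotated rectangle and bounding box are drawn on the frame.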
        // Update is called once per frame
        void Update()
        {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                Touch t = Input.GetTouch(0);
                if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
                {
                    storedTouchPoint = new Point(t.position.x, t.position.y);
                    //Debug.Log ("touch X " + t.position.x);
                    //Debug.Log ("touch Y " + t.position.y);
                }
            }
#else
            //Mouse
            if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
            {
                storedTouchPoint = new Point(Input.mousePosition.x, Input.mousePosition.y);
                //Debug.Log ("mouse X " + Input.mousePosition.x);
                //Debug.Log ("mouse Y " + Input.mousePosition.y);
            }
#endif

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                if (storedTouchPoint != null)
                {
                    ConvertScreenPointToTexturePoint(storedTouchPoint, storedTouchPoint, gameObject, rgbaMat.cols(), rgbaMat.rows());
                    OnTouch(rgbaMat, storedTouchPoint);
                    storedTouchPoint = null;
                }

                Point[] points = roiPointList.ToArray();

                if (shouldStartCamShift)
                {
                    shouldStartCamShift = false;

                    using (MatOfPoint roiPointMat = new MatOfPoint(points))
                    {
                        roiRect = Imgproc.boundingRect(roiPointMat);
                    }

                    if (roiHistMat != null)
                    {
                        roiHistMat.Dispose();
                        roiHistMat = null;
                    }
                    roiHistMat = new Mat();

                    using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                    using (Mat maskMat = new Mat())
                    {
                        Imgproc.calcHist(new List<Mat>(new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                        Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

                        //Debug.Log ("roiHist " + roiHistMat.ToString ());
                    }
                }
                else if (points.Length == 4)
                {
                    using (Mat backProj = new Mat())
                    {
                        Imgproc.calcBackProject(new List<Mat>(new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);

                        RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                        r.points(points);
                    }
                }

                if (points.Length < 4)
                {
                    for (int i = 0; i < points.Length; i++)
                    {
                        Imgproc.circle(rgbaMat, points[i], 6, new Scalar(0, 0, 255, 255), 2);
                    }
                }
                else
                {
                    for (int i = 0; i < 4; i++)
                    {
                        Imgproc.line(rgbaMat, points[i], points[(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
                    }

                    Imgproc.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
                }

                //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture);
            }
        }
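        // OnTouch collects up to 4 ROI corner points (already converted to texture coordinates).
        // Points outside the image are discarded, the 4th valid point requests CamShift
        // initialization, and touching again after 4 points clears the selection to start over.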
        private void OnTouch(Mat img, Point touchPoint)
        {
            if (roiPointList.Count == 4)
            {
                roiPointList.Clear();
            }

            if (roiPointList.Count < 4)
            {
                roiPointList.Add(touchPoint);
                if (!(new OpenCVForUnity.CoreModule.Rect(0, 0, img.width(), img.height()).contains(roiPointList[roiPointList.Count - 1])))
                {
                    roiPointList.RemoveAt(roiPointList.Count - 1);
                }

                if (roiPointList.Count == 4)
                {
                    shouldStartCamShift = true;
                }
            }
        }

        /// <summary>
        /// Converts a screen point to a texture point.
        /// </summary>
        /// <param name="screenPoint">Screen point.</param>
        /// <param name="dstPoint">Dst point.</param>
        /// <param name="textureQuad">Texture quad.</param>
        /// <param name="textureWidth">Texture width.</param>
        /// <param name="textureHeight">Texture height.</param>
        /// <param name="camera">Camera.</param>
        private void ConvertScreenPointToTexturePoint(Point screenPoint, Point dstPoint, GameObject textureQuad, int textureWidth = -1, int textureHeight = -1, Camera camera = null)
        {
            if (textureWidth < 0 || textureHeight < 0)
            {
                Renderer r = textureQuad.GetComponent<Renderer>();
                if (r != null && r.material != null && r.material.mainTexture != null)
                {
                    textureWidth = r.material.mainTexture.width;
                    textureHeight = r.material.mainTexture.height;
                }
                else
                {
                    textureWidth = (int)textureQuad.transform.localScale.x;
                    textureHeight = (int)textureQuad.transform.localScale.y;
                }
            }

            if (camera == null)
                camera = Camera.main;

            Vector3 quadPosition = textureQuad.transform.localPosition;
            Vector3 quadScale = textureQuad.transform.localScale;

            // Project the quad's corners to screen space, then map the screen point into
            // texture coordinates via a perspective transform.
            Vector2 tl = camera.WorldToScreenPoint(new Vector3(quadPosition.x - quadScale.x / 2, quadPosition.y + quadScale.y / 2, quadPosition.z));
            Vector2 tr = camera.WorldToScreenPoint(new Vector3(quadPosition.x + quadScale.x / 2, quadPosition.y + quadScale.y / 2, quadPosition.z));
            Vector2 br = camera.WorldToScreenPoint(new Vector3(quadPosition.x + quadScale.x / 2, quadPosition.y - quadScale.y / 2, quadPosition.z));
            Vector2 bl = camera.WorldToScreenPoint(new Vector3(quadPosition.x - quadScale.x / 2, quadPosition.y - quadScale.y / 2, quadPosition.z));

            using (Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2))
            using (Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2))
            {
                srcRectMat.put(0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
                dstRectMat.put(0, 0, 0, 0, quadScale.x, 0, quadScale.x, quadScale.y, 0, quadScale.y);

                using (Mat perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat))
                using (MatOfPoint2f srcPointMat = new MatOfPoint2f(screenPoint))
                using (MatOfPoint2f dstPointMat = new MatOfPoint2f())
                {
                    Core.perspectiveTransform(srcPointMat, dstPointMat, perspectiveTransform);

                    dstPoint.x = dstPointMat.get(0, 0)[0] * textureWidth / quadScale.x;
                    dstPoint.y = dstPointMat.get(0, 0)[1] * textureHeight / quadScale.y;
                }
            }
        }

        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy()
        {
            webCamTextureToMatHelper.Dispose();
        }

        /// <summary>
        /// Raises the back button click event.
        /// </summary>
        public void OnBackButtonClick()
        {
            SceneManager.LoadScene("OpenCVForUnityExample");
        }

        /// <summary>
        /// Raises the play button click event.
        /// </summary>
        public void OnPlayButtonClick()
        {
            webCamTextureToMatHelper.Play();
        }

        /// <summary>
        /// Raises the pause button click event.
        /// </summary>
        public void OnPauseButtonClick()
        {
            webCamTextureToMatHelper.Pause();
        }

        /// <summary>
        /// Raises the stop button click event.
        /// </summary>
        public void OnStopButtonClick()
        {
            webCamTextureToMatHelper.Stop();
        }

        /// <summary>
        /// Raises the change camera button click event.
        /// </summary>
        public void OnChangeCameraButtonClick()
        {
            webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.requestedIsFrontFacing;
        }
    }
}