Background / What I want to achieve
I want to perform circle detection on HoloLens 2 using OpenCV for Unity. I modified part of the comic filter example (HoloLensComicFilterExample) included in HoloLensWithOpenCVForUnityExample, but no circles are drawn.
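For reference, this is the kind of detection call I am relying on, pulled out of the HoloLens pipeline so it can be tried on any single-channel Mat. The HoughCircles parameters are the same ones used in my code below; the GaussianBlur step, the HoughCirclesSketch/DetectAndDraw names and the Debug.Log are assumptions I added for testing, not code from the example project.

using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using UnityEngine;

public static class HoughCirclesSketch
{
    // Minimal sketch: detect circles in a single-channel 8-bit Mat and draw them back onto it.
    public static int DetectAndDraw(Mat grayMat)
    {
        using (Mat blurred = new Mat())
        using (Mat circles = new Mat())
        {
            // Smoothing the detection input usually reduces spurious circles;
            // the original frame is left untouched for display.
            Imgproc.GaussianBlur(grayMat, blurred, new Size(9, 9), 2, 2);

            // Same parameters as in the question: dp = 2, minDist = 10,
            // param1 = 160, param2 = 50, radius between 10 and 40 px.
            Imgproc.HoughCircles(blurred, circles, Imgproc.CV_HOUGH_GRADIENT, 2, 10, 160, 50, 10, 40);

            // HoughCircles returns a 1 x N Mat where each element is (x, y, radius).
            for (int i = 0; i < circles.cols(); i++)
            {
                double[] c = circles.get(0, i);
                Point center = new Point(c[0], c[1]);
                int radius = (int)c[2];
                // grayMat is single-channel, so only the first Scalar value (intensity) is used.
                Imgproc.circle(grayMat, center, radius, new Scalar(255), 5);
            }

            Debug.Log("circles detected: " + circles.cols());
            return circles.cols();
        }
    }
}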
Problem / error message
No circles are drawn on the grayscale video shown on HoloLens 2.
Relevant source code
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVForUnity.UnityUtils;
using HoloLensWithOpenCVForUnity.UnityUtils.Helper;
using HoloLensCameraStream;

namespace HoloLensWithOpenCVForUnityExample
{
    /// <summary>
    /// HoloLens Comic Filter Example
    /// An example of image processing (comic filter) using OpenCVForUnity on Hololens.
    /// Referring to http://dev.classmethod.jp/smartphone/opencv-manga-2/.
    /// </summary>
    [RequireComponent(typeof(HololensCameraStreamToMatHelper))]
    public class HoloLensComicFilterExample : MonoBehaviour
    {
        /// <summary>
        /// The texture.
        /// </summary>
        Texture2D texture;

        /// <summary>
        /// The quad renderer.
        /// </summary>
        Renderer quad_renderer;

        /// <summary>
        /// The web cam texture to mat helper.
        /// </summary>
        HololensCameraStreamToMatHelper webCamTextureToMatHelper;

        public float vignetteScale = 1.5f;

        public Slider VignetteScaleSlider;

        readonly static Queue<Action> ExecuteOnMainThread = new Queue<Action>();

        [HeaderAttribute("Debug")]
        public Text renderFPS;
        public Text videoFPS;
        public Text trackFPS;
        public Text debugStr;

        // Use this for initialization
        protected void Start()
        {
            VignetteScaleSlider.value = vignetteScale;

            webCamTextureToMatHelper = gameObject.GetComponent<HololensCameraStreamToMatHelper>();
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
            webCamTextureToMatHelper.frameMatAcquired += OnFrameMatAcquired;
#endif
            webCamTextureToMatHelper.outputColorFormat = WebCamTextureToMatHelper.ColorFormat.GRAY;
            webCamTextureToMatHelper.Initialize();
        }

        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat grayMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(grayMat.cols(), grayMat.rows(), TextureFormat.Alpha8, false);
            texture.wrapMode = TextureWrapMode.Clamp;

            quad_renderer = gameObject.GetComponent<Renderer>() as Renderer;
            quad_renderer.sharedMaterial.SetTexture("_MainTex", texture);

            //Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
            DebugUtils.AddDebugStr(webCamTextureToMatHelper.GetWidth() + " x " + webCamTextureToMatHelper.GetHeight() + " : " + webCamTextureToMatHelper.GetFPS());

            Matrix4x4 projectionMatrix;
#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
            projectionMatrix = webCamTextureToMatHelper.GetProjectionMatrix();
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
#else
            // This value is obtained from PhotoCapture's TryGetProjectionMatrix() method. I do not know whether this method is good.
            // Please see the discussion of this thread.
            // https://forums.hololens.com/discussion/782/live-stream-of-locatable-camera-webcam-in-unity
            projectionMatrix = Matrix4x4.identity;
            projectionMatrix.m00 = 2.31029f;
            projectionMatrix.m01 = 0.00000f;
            projectionMatrix.m02 = 0.09614f;
            projectionMatrix.m03 = 0.00000f;
            projectionMatrix.m10 = 0.00000f;
            projectionMatrix.m11 = 4.10427f;
            projectionMatrix.m12 = -0.06231f;
            projectionMatrix.m13 = 0.00000f;
            projectionMatrix.m20 = 0.00000f;
            projectionMatrix.m21 = 0.00000f;
            projectionMatrix.m22 = -1.00000f;
            projectionMatrix.m23 = 0.00000f;
            projectionMatrix.m30 = 0.00000f;
            projectionMatrix.m31 = 0.00000f;
            projectionMatrix.m32 = -1.00000f;
            projectionMatrix.m33 = 0.00000f;
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
#endif

            quad_renderer.sharedMaterial.SetFloat("_VignetteScale", vignetteScale);

            float halfOfVerticalFov = Mathf.Atan(1.0f / projectionMatrix.m11);
            float aspectRatio = (1.0f / Mathf.Tan(halfOfVerticalFov)) / projectionMatrix.m00;
            Debug.Log("halfOfVerticalFov " + halfOfVerticalFov);
            Debug.Log("aspectRatio " + aspectRatio);
        }

        /// <summary>
        /// Raises the web cam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            lock (ExecuteOnMainThread)
            {
                ExecuteOnMainThread.Clear();
            }

            if (debugStr != null)
            {
                debugStr.text = string.Empty;
            }
            DebugUtils.ClearDebugStr();
        }

#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
        public void OnFrameMatAcquired(Mat grayMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
        {
            DebugUtils.VideoTick();

            //Imgproc.putText(grayMat, "W:" + grayMat.width() + " H:" + grayMat.height() + " SO:" + Screen.orientation, new Point(5, grayMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255), 2, Imgproc.LINE_AA, false);

            DebugUtils.TrackTick();

            Enqueue(() =>
            {
                if (!webCamTextureToMatHelper.IsPlaying())
                    return;

                Utils.fastMatToTexture2D(grayMat, texture);
                grayMat.Dispose();

                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;
                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2) * 2.2f;

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            });
        }

        private void Update()
        {
            lock (ExecuteOnMainThread)
            {
                while (ExecuteOnMainThread.Count > 0)
                {
                    ExecuteOnMainThread.Dequeue().Invoke();
                }
            }
        }

        private void Enqueue(Action action)
        {
            lock (ExecuteOnMainThread)
            {
                ExecuteOnMainThread.Enqueue(action);
            }
        }

#else

        // Update is called once per frame
        // Circle detection
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                DebugUtils.VideoTick();

                Mat grayMat = webCamTextureToMatHelper.GetMat();

                using (Mat circles = new Mat())
                {
                    // dp = 2, minDist = 10, param1 = 160, param2 = 50, radius between 10 and 40 px.
                    Imgproc.HoughCircles(grayMat, circles, Imgproc.CV_HOUGH_GRADIENT, 2, 10, 160, 50, 10, 40);

                    Point pt = new Point();
                    for (int i = 0; i < circles.cols(); i++)
                    {
                        double[] data = circles.get(0, i);
                        pt.x = data[0];
                        pt.y = data[1];
                        double rho = data[2];
                        Imgproc.circle(grayMat, pt, (int)rho, new Scalar(255, 0, 0, 255), 5);
                    }
                }

                //Imgproc.putText(grayMat, "W:" + grayMat.width() + " H:" + grayMat.height() + " SO:" + Screen.orientation, new Point(5, grayMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255), 2, Imgproc.LINE_AA, false);

                DebugUtils.TrackTick();

                Utils.fastMatToTexture2D(grayMat, texture);
            }
        }
#endif
    }
}
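Since the example compiles the frame callback path on the device (WINDOWS_UWP defined and DISABLE_HOLOLENSCAMSTREAM_API not defined), I suspect the circle detection would have to run inside OnFrameMatAcquired rather than in the #else Update() above. The following is only a rough sketch of how I imagine that would look, reusing the hypothetical DetectAndDraw helper from the earlier sketch; it is not code taken from the example project, and I have not confirmed that this is the right place.

#if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API
        // Sketch only: the same circle detection, moved into the frame callback
        // that the device build actually uses. This would replace OnFrameMatAcquired
        // in the class above; DetectAndDraw is the hypothetical helper from the
        // earlier sketch, everything else mirrors the callback in the question.
        public void OnFrameMatAcquired(Mat grayMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
        {
            DebugUtils.VideoTick();

            // Detect and draw circles on the grayscale frame before it is uploaded to the texture.
            HoughCirclesSketch.DetectAndDraw(grayMat);

            DebugUtils.TrackTick();

            Enqueue(() =>
            {
                if (!webCamTextureToMatHelper.IsPlaying())
                    return;

                Utils.fastMatToTexture2D(grayMat, texture);
                grayMat.Dispose();

                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;
                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position and orient the quad in front of the camera, as in the original example.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2) * 2.2f;
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            });
        }
#endif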