Health/Assets/Scripts/UI/Component/WebCamFaceDetectManager.cs

using OpenCVCompact;
using Serenegiant.UVC;
using UnityEngine;
using UnityEngine.UI;
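/// <summary>
/// Feeds webcam frames into the face-detection pipeline of FaceDetectManagerBase and
/// previews both the full camera image and the cropped face region on RawImages.
/// </summary>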
[RequireComponent(typeof(OpenCVCompact.WebCamTextureToMatHelper))]
public class WebCamFaceDetectManager : FaceDetectManagerBase
{
    public RawImage image;      // full-frame camera preview
    public RawImage cutImage;   // cropped face-region preview

    bool webCamReady = false;
    private int[] _positionRect;
    //private Texture2D _cutTexture;

    //Image Capture
    OpenCVCompact.WebCamTextureToMatHelper _webCamTextureToMatHelper;

    protected override void InitMatHelper()
    {
        // Fetch the WebCamTextureToMatHelper required on this GameObject and start the camera.
        _webCamTextureToMatHelper = gameObject.GetComponent<OpenCVCompact.WebCamTextureToMatHelper>();
        _webCamTextureToMatHelper.Initialize();
    }

    protected override Mat GetMat()
    {
        // Only hand a frame to the base class when the camera is running and a new frame arrived.
        if (!_webCamTextureToMatHelper.IsPlaying() || !_webCamTextureToMatHelper.DidUpdateThisFrame())
            return null;

        return _webCamTextureToMatHelper.GetMat();
    }

    protected override void Dispose()
    {
        _webCamTextureToMatHelper.Dispose();
    }

    protected override void DebugPint()
    {
        if (webCamReady)
        {
            // Push the processed full frame into the on-screen preview texture.
            if (mat4Display.rows() == _videoTexture.height)
            {
                mat4Display.copyTo(mat4DisplayTexture);
                Utils.matToTexture2D(mat4DisplayTexture, _videoTexture);
            }

            // Push the cropped face region into its own preview.
            // Note: this allocates a new Mat and Texture2D each frame it runs.
            if (mat4Process.rows() == cutImage.texture.height)
            {
                Mat cutMat = mat4Process.clone();
                Texture2D cutTexture = new Texture2D(cutMat.cols(), cutMat.rows(), TextureFormat.RGBA32, false);
                Utils.matToTexture2D(cutMat, cutTexture);
                cutImage.texture = cutTexture;
            }
        }
    }

    public void OnWebCamTextureToMatHelperInitialized()
    {
        Mat webCamTextureMat = _webCamTextureToMatHelper.GetMat();

        // Create the full-frame preview texture and bind it to the RawImage.
        _videoTexture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
        //gameObject.GetComponent<Renderer>().material.mainTexture = videoTexture;
        image.texture = _videoTexture;

        // Crop rectangle for the face region; _positionRect[2] and [3] hold its width and height.
        _positionRect = PictureUtility.GetPositionRect(webCamTextureMat.cols(), webCamTextureMat.rows());
        _cutTexture = new Texture2D(_positionRect[2], _positionRect[3], TextureFormat.RGBA32, false);
        cutImage.texture = _cutTexture;

        // Working Mats for display, sized to the camera frame.
        mat4Display = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC4);
        mat4DisplayTexture = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC4);

        LogPrint.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

        // Fit the orthographic camera so the video fills the screen without distortion.
        float width = webCamTextureMat.width();
        float height = webCamTextureMat.height();
        float widthScale = (float)Screen.width / width;
        float heightScale = (float)Screen.height / height;
        if (widthScale < heightScale)
        {
            Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        }
        else
        {
            Camera.main.orthographicSize = height / 2;
        }

        // Pass the camera frame size to the head-pose estimation setup.
        dnnUtils.InitHeadPoseEstimationCameraInfo(webCamTextureMat.cols(), webCamTextureMat.rows());

        webCamReady = true;
        LogPrint.Log("OnWebCamTextureToMatHelperInitialized");
    }

    public void OnWebCamTextureToMatHelperDisposed()
    {
        LogPrint.Log("OnWebCamTextureToMatHelperDisposed");
    }

    public void OnWebCamTextureToMatHelperErrorOccurred(OpenCVCompact.WebCamTextureToMatHelper.ErrorCode errorCode)
    {
        LogPrint.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
    }
}
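
// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the original file: the three
// OnWebCamTextureToMatHelper* handlers above are callbacks that the
// WebCamTextureToMatHelper fires through UnityEvents (in OpenCVForUnity-style
// projects these are usually wired up in the Inspector). Assuming the
// OpenCVCompact helper likewise exposes onInitialized / onDisposed /
// onErrorOccurred events (an assumption, not confirmed by this file), the same
// wiring could also be done from code like this:

[RequireComponent(typeof(WebCamFaceDetectManager))]
[RequireComponent(typeof(OpenCVCompact.WebCamTextureToMatHelper))]
public class WebCamFaceDetectWiring : MonoBehaviour
{
    void Awake()
    {
        var manager = GetComponent<WebCamFaceDetectManager>();
        var helper = GetComponent<OpenCVCompact.WebCamTextureToMatHelper>();

        // Event names assumed from OpenCVForUnity's WebCamTextureToMatHelper;
        // adjust to whatever the OpenCVCompact variant actually exposes.
        helper.onInitialized.AddListener(manager.OnWebCamTextureToMatHelperInitialized);
        helper.onDisposed.AddListener(manager.OnWebCamTextureToMatHelperDisposed);
        helper.onErrorOccurred.AddListener(manager.OnWebCamTextureToMatHelperErrorOccurred);
    }
}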