// Health/Assets/Scripts/UI/Component/CaptureManagerBase.cs

using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
namespace Yoga
{
/// <summary>
/// Base class for camera-capture components: grabs a frame from a subclass-provided
/// source each Update, runs the current pose estimator on it, optionally draws a
/// debug overlay (person box + skeleton), and pushes the result to a display texture.
/// Capture is toggled by StartMotionCapture / StopMotionCapture events.
/// </summary>
public abstract class CaptureManagerBase : MonoBehaviour
{
    /// <summary>Target RawImage used to display the processed frame.</summary>
    public RawImage TargetDisplayer;

    protected Mat _bgrMat;
    protected bool _isOnCamCapture = false;
    protected Texture2D _displayTexture;

    public bool IsOnCamCapture { get => _isOnCamCapture; internal set => _isOnCamCapture = value; }

    private void Awake()
    {
        Init();
    }

    /// <summary>
    /// Enables OpenCV debug logging, lets the subclass allocate its Mat buffers,
    /// and initializes the pose-estimation model.
    /// </summary>
    public void Init()
    {
        Utils.setDebugMode(true); // Print OpenCV debug logs. NOTE(review): likely should be disabled in release builds.
        MatInitial();
        CVEstimator.Instance.Init(); // Initialize the pose-estimation model.
    }

    private void OnEnable()
    {
        EventRegist();
    }

    private void OnDisable()
    {
        // NOTE(review): OnStopMotionCapture sets enabled = false, which runs this
        // handler and unsubscribes StartMotionCapture too — after a stop, a later
        // StartMotionCapture event cannot reach this component unless something
        // external re-enables it first. Confirm this lifecycle is intended.
        EventRemove();
    }

    private void Update()
    {
        if (!_isOnCamCapture)
            return;

        // Keep the displayer object visible while capturing.
        if (!gameObject.activeSelf)
            gameObject.SetActive(true);

        Mat img = GetMat();
        if (img == null)
            return;

        // Run the estimator; on failure it draws the error info onto the image.
        if (YogaManager.Instance.CurrentEstimator.Check(ref img))
        {
            YogaManager.Instance.RgbaMat = img.clone();

            if (GlobalData.Instance.IsEstimationPrintMode)
            {
                DrawDebugOverlay(img);
            }
        }

        // Assumes MatInitial() created _displayTexture with img's dimensions — TODO confirm.
        Utils.matToTexture2D(img, _displayTexture);
    }

    /// <summary>
    /// Draws the first detected person rectangle and the pose skeleton onto
    /// <paramref name="img"/> for debugging.
    /// </summary>
    private void DrawDebugOverlay(Mat img)
    {
        if (YogaManager.Instance.PersonRectResult.Count > 0)
        {
            var box = YogaManager.Instance.PersonRectResult[0];
            float left = box[0];
            float top = box[1];
            float right = box[2];
            float bottom = box[3];
            Imgproc.rectangle(img, new Point(left, top), new Point(right, bottom), new Scalar(0, 0, 255), 20);
        }

        List<Point> points = YogaManager.Instance.CurrEstimateKeyPoints;
        if (points == null || points.Count == 0)
            return;

        for (int i = 0; i < YogaConfig.POSE_PAIRS.GetLength(0); i++)
        {
            string partFrom = YogaConfig.POSE_PAIRS[i, 0];
            string partTo = YogaConfig.POSE_PAIRS[i, 1];
            int idFrom = YogaConfig.BODY_PARTS[partFrom];
            int idTo = YogaConfig.BODY_PARTS[partTo];

            // Null check must come first (the original tested the sentinel before null).
            if (points[idFrom] == null || points[idTo] == null)
                continue;

            // (-1, -1) marks "joint not detected".
            // BUG FIX: the original used `points[idFrom] == new Point(-1, -1)`; Point is a
            // reference type, so without an `==` overload that compares references and never
            // matches a fresh instance. Comparing coordinates is correct in either case.
            if ((points[idFrom].x == -1 && points[idFrom].y == -1) ||
                (points[idTo].x == -1 && points[idTo].y == -1))
                continue;

            Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
            Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
            Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
        }
    }

    private void OnDestroy()
    {
        OnRelease();
    }

    #region Event Func
    private void OnStartMotionCapture()
    {
        this.enabled = true;
        _isOnCamCapture = true;
        CVEstimator.Instance.StartEstimation(); // Start pose estimation.
    }

    private void OnStopMotionCapture()
    {
        this.enabled = false;
        _isOnCamCapture = false;
    }
    #endregion

    /// <summary>Releases the current estimator, the BGR working Mat, and the CV estimator singleton.</summary>
    protected virtual void OnRelease()
    {
        YogaManager.Instance.CurrentEstimator.Dispose();
        if (_bgrMat != null)
        {
            _bgrMat.Dispose();
            _bgrMat = null; // Guard against double-dispose if OnRelease is called again.
        }
        CVEstimator.Instance.Dispose();
    }

    protected virtual void EventRegist()
    {
        EventManager.Instance.AddEventListener(YogaEventType.StartMotionCapture, OnStartMotionCapture);
        EventManager.Instance.AddEventListener(YogaEventType.StopMotionCapture, OnStopMotionCapture);
    }

    protected virtual void EventRemove()
    {
        EventManager.Instance.RemoveEventListener(YogaEventType.StartMotionCapture, OnStartMotionCapture);
        EventManager.Instance.RemoveEventListener(YogaEventType.StopMotionCapture, OnStopMotionCapture);
    }

    /// <summary>Subclass hook: allocate the Mat/texture buffers for the concrete capture source.</summary>
    protected abstract void MatInitial();

    /// <summary>Subclass hook: return the latest frame, or null when none is available.</summary>
    protected abstract Mat GetMat();
}
}