Health/Assets/Scripts/PoseCheck/MotionCaptureManager.cs

#undef DEBUG_MODE
#define DEBUG_MODE
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.DnnModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVForUnityExample;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using UnityEngine;
using UnityEngine.Video;
namespace Yoga
{
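/// <summary>
/// Captures webcam frames, runs YOLOv7 person detection and OpenPose keypoint estimation,
/// and draws the detected skeleton onto the preview texture.
/// </summary>
/// <remarks>
/// Capture is driven through the event system; a minimal sketch (assuming the events wired up in OnEnable):
/// EventManager.Instance.Dispatch(YogaEventType.StartMotionCapture);
/// EventManager.Instance.Dispatch(YogaEventType.StopMotionCapture);
/// </remarks>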
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class MotionCaptureManager : MonoSingleton<MotionCaptureManager>
{
private WebCamTextureToMatHelper _webCamTextureToMatHelper;
private bool isInited = false;
private Texture2D texture;
private Net _net;
private Dictionary<ModelType, KeypointsModel> _models = new Dictionary<ModelType, KeypointsModel>();
private KeypointsModel _openPoseModel;
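// YOLOv7 detector input size and post-processing parameters (confidence threshold, NMS threshold, top-K).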
public int inpWidth = 416;
public int inpHeight = 416;
public float confThreshold = 0.35f;
public float nmsThreshold = 0.6f;
public int topK = 1000;
public float SamplingRate = 0.3f;
private List<float[]> _voloResult = new List<float[]>();
private List<Point> _currPersonPoints = new List<Point>();
private YOLOv7ObjectDetector _objectDetector;
private Mat _bgrMat;
private double threshold = 0.5;
private bool _isOnCamCapture = false;
public bool IsOnCamCapture { get => _isOnCamCapture; internal set => _isOnCamCapture = value; }
public WebCamTextureToMatHelper WebCamTextureToMatHelper => _webCamTextureToMatHelper;
public List<Point> CurrPersonPoints { get => _currPersonPoints; set => _currPersonPoints = value; }
public List<float[]> VoloResult { get => _voloResult; set => _voloResult = value; }
private bool _isCorrectAction = false;
private Mat _rgbaMat;
public Mat RgbaMat { get => _rgbaMat; set => _rgbaMat = value; }
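// Subscribe/unsubscribe the handlers for the yoga capture events.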
private void OnEnable()
{
EventManager.Instance.AddEventListener(YogaEventType.StartMotionCapture, OnStartMotionCapture);
EventManager.Instance.AddEventListener(YogaEventType.StopMotionCapture, OnStopMotionCapture);
EventManager.Instance.AddEventListener(YogaEventType.EstimateAction, EstimateAction);
EventManager.Instance.AddEventListener(YogaEventType.ScoreUpdate, ScoreUpdate);
EventManager.Instance.AddEventListener(YogaEventType.GetActionBasePoint, GetActionBasePoint);
}
private void OnDisable()
{
EventManager.Instance.RemoveEventListener(YogaEventType.StartMotionCapture, OnStartMotionCapture);
EventManager.Instance.RemoveEventListener(YogaEventType.StopMotionCapture, OnStopMotionCapture);
EventManager.Instance.RemoveEventListener(YogaEventType.EstimateAction, EstimateAction);
EventManager.Instance.RemoveEventListener(YogaEventType.ScoreUpdate, ScoreUpdate);
EventManager.Instance.RemoveEventListener(YogaEventType.GetActionBasePoint, GetActionBasePoint);
}
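/// <summary>
/// Initializes the webcam helper and game data, shows the action guide video, loads the models and initializes the pose estimator.
/// </summary>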
public override void Init()
{
base.Init();
_webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
_webCamTextureToMatHelper.Initialize();
YogaManager.Instance.InitData();
// Show the action guide video panel
var video = Resources.Load<VideoClip>(YogaManager.Instance.Action.VideoPath);
UIManager.Instance.Init();
UIManager.Instance.ShowPanel<ActionGuideVideoPanel>(false, video);
LoadModels(); // Load the detection models
CVEstimator.Instance.Init(_objectDetector, _openPoseModel); // Initialize pose estimation
}
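/// <summary>
/// Loads the YOLOv7 person detector and the OpenPose keypoints model used for pose estimation.
/// </summary>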
private void LoadModels()
{
//Utils.setDebugMode(true); // print OpenCV debug logs
_net = null;
// First detect the person with YOLOv7, then estimate the body pose with OpenPose
_objectDetector = new YOLOv7ObjectDetector(
Utils.getFilePath("OpenCVForUnity/dnn/yolov7-tiny.weights"),
Utils.getFilePath("OpenCVForUnity/dnn/yolov7-tiny.cfg"),
Utils.getFilePath("OpenCVForUnity/dnn/coco.names"),
new Size(inpWidth, inpHeight), confThreshold, nmsThreshold/*, topK*/);
var modelFilePath = Utils.getFilePath(YogaConfig.MODEL_PATHS[ModelType.OpenPose]);
if (string.IsNullOrEmpty(modelFilePath))
{
Debug.LogError("modelFilePath is empty. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
return;
}
_net = Dnn.readNet(modelFilePath);
_openPoseModel = new KeypointsModel(_net);
_openPoseModel.setInputScale(new Scalar(YogaConfig.InScale));
_openPoseModel.setInputSize(new Size(YogaConfig.InWidth, YogaConfig.InHeight));
_openPoseModel.setInputMean(new Scalar(YogaConfig.InMean));
_openPoseModel.setInputSwapRB(false);
_openPoseModel.setInputCrop(false);
}
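/// <summary>
/// Per-frame update: grabs the latest webcam frame, draws the detection box and the pose skeleton, and writes the result to the preview texture.
/// </summary>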
private void Update()
{
if (!_isOnCamCapture)
{
return;
}
if (!transform.gameObject.activeSelf)
transform.gameObject.SetActive(true);
if (_webCamTextureToMatHelper.IsPlaying() && _webCamTextureToMatHelper.DidUpdateThisFrame())
{
Mat img = _webCamTextureToMatHelper.GetMat();
Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
_rgbaMat = img.clone();
if (_net == null)
{
Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
}
if (_voloResult.Count >= 3)
DebugPrintObjectLayout(img, _voloResult[0], _voloResult[1], _voloResult[2]);
if (_currPersonPoints != null && _currPersonPoints.Count > 0)
{
List<Point> points = _currPersonPoints;
for (int i = 0; i < YogaConfig.POSE_PAIRS.GetLength(0); i++)
{
string partFrom = YogaConfig.POSE_PAIRS[i, 0];
string partTo = YogaConfig.POSE_PAIRS[i, 1];
int idFrom = YogaConfig.BODY_PARTS[partFrom];
int idTo = YogaConfig.BODY_PARTS[partTo];
// Skip pairs whose keypoints were not detected (null or marked as (-1, -1)).
if (points[idFrom] == null || points[idTo] == null)
continue;
if ((points[idFrom].x == -1 && points[idFrom].y == -1) || (points[idTo].x == -1 && points[idTo].y == -1))
continue;
Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
}
}
//#endif
Utils.matToTexture2D(img, texture);
}
}
#region Event Func
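// Event handler: enable capture and start the pose estimation loop.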
private void OnStartMotionCapture()
{
this.enabled = true;
_isOnCamCapture = true;
CVEstimator.Instance.StartEstimation(); // Start pose estimation
}
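// Event handler: disable the component and stop capturing frames.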
private void OnStopMotionCapture()
{
this.enabled = false;
_isOnCamCapture = false;
}
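/// <summary>
/// Busy-waits (up to ~100 ms) for the current keypoints and stores them as the action's base points.
/// </summary>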
private void GetActionBasePoint()
{
var startTime = DateTime.Now;
while (true)
{
if (_currPersonPoints != null && _currPersonPoints.Count != 0)
{
YogaManager.Instance.Points = _currPersonPoints;
break;
}
if (startTime.AddMilliseconds(100) < DateTime.Now)
{
Debug.LogError("GetActionBasePoint timeout");
break;
}
}
}
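/// <summary>
/// Busy-waits (up to ~100 ms) for valid keypoints, then records whether they match the expected action.
/// </summary>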
private void EstimateAction(params object[] args)
{
var type = args.FirstOrDefault();
if (type == null)
{
Debug.LogError("EstimateAction type is null");
return;
}
AvatarAction actionType = (AvatarAction)type;
// Check the action against the expected pose
var startTime = DateTime.Now;
while (true)
{
if (YogaManager.Instance.ActionCheckPoints(_currPersonPoints))
{
_isCorrectAction = (_isCorrectAction || YogaManager.Instance.IsCorrectAction(_currPersonPoints, actionType));
break;
}
if (startTime.AddMilliseconds(100) < DateTime.Now)
{
Debug.LogError("请摆正姿势");
Debug.LogError("EstimateAction timeout");
break;
}
}
}
#endregion
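/// <summary>
/// Debug overlay: draws a detection box with its class label and confidence onto the given image.
/// </summary>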
private void DebugPrintObjectLayout(Mat image, float[] box, float[] conf, float[] cls, bool isRGB = false)
{
float left = box[0];
float top = box[1];
float right = box[2];
float bottom = box[3];
int classId = (int)cls[0];
Scalar c = _objectDetector.palette[classId % _objectDetector.palette.Count];
Scalar color = isRGB ? c : new Scalar(c.val[2], c.val[1], c.val[0], c.val[3]);
Imgproc.rectangle(image, new Point(left, top), new Point(right, bottom), color, 2);
string label = String.Format("{0:0.00}", conf[0]);
if (_objectDetector.classNames != null && _objectDetector.classNames.Count != 0)
{
if (classId < (int)_objectDetector.classNames.Count)
{
label = _objectDetector.classNames[classId] + " " + label;
}
}
int[] baseLine = new int[1];
Size labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);
top = Mathf.Max((float)top, (float)labelSize.height);
Imgproc.rectangle(image, new Point(left, top - labelSize.height),
new Point(left + labelSize.width, top + baseLine[0]), color, Core.FILLED);
Imgproc.putText(image, label, new Point(left, top), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, Scalar.all(255), 1, Imgproc.LINE_AA);
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy()
{
_webCamTextureToMatHelper.Dispose();
if (_net != null)
_net.Dispose();
if (_bgrMat != null)
_bgrMat.Dispose();
_models.Clear();
CVEstimator.Instance.Dispose();
}
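/// <summary>
/// Event callback: raised when the WebCamTextureToMatHelper is initialized. Creates the preview texture and fits the orthographic camera to the webcam frame.
/// </summary>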
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = _webCamTextureToMatHelper.GetMat();
texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGB24, false);
Utils.matToTexture2D(webCamTextureMat, texture);
this.gameObject.GetComponent<Renderer>().material.mainTexture = texture;
gameObject.transform.localScale = new Vector3(webCamTextureMat.cols() / 10, webCamTextureMat.rows() / 10, 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = height / 2;
}
_bgrMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
}
/// <summary>
/// Event callback: raised when the WebCamTextureToMatHelper is disposed. Releases the working Mat and preview texture.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
if (_bgrMat != null)
_bgrMat.Dispose();
if (texture != null)
{
Texture2D.Destroy(texture);
texture = null;
}
}
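/// <summary>
/// Dispatches ActionSuccess or ActionFailed based on the last action check, then resets the flag.
/// </summary>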
public void ScoreUpdate()
{
if (_isCorrectAction)
{
EventManager.Instance.Dispatch(YogaEventType.ActionSuccess);
}
else
{
EventManager.Instance.Dispatch(YogaEventType.ActionFailed);
}
_isCorrectAction = false; // Reset for the next check
}
}
}