// Health/Assets/Scripts/PoseCheck/MotionCaptureManager.cs
// (Web-scrape metadata removed: "459 lines / 17 KiB / C# / Raw Normal View History / 2023-11-07".)
#undef DEBUG_MODE
#define DEBUG_MODE
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.DnnModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVForUnityExample;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using UnityEngine;
using UnityEngine.Video;
namespace Yoga
{
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class MotionCaptureManager : MonoSingleton<MotionCaptureManager>
{
// Webcam capture helper (added via RequireComponent); resolved in Init().
private WebCamTextureToMatHelper _webCamTextureToMatHelper;
private bool isInited = false;
// Display texture the camera frame (plus debug overlays) is blitted to each frame.
private Texture2D texture;
// DNN backing the OpenPose keypoints model; stays null if LoadModels() fails.
private Net _net;
private Dictionary<ModelType, KeypointsModel> _models = new Dictionary<ModelType, KeypointsModel>();
private KeypointsModel _openPoseModel;

// YOLOv7 network input size and detection thresholds.
public int inpWidth = 416;
public int inpHeight = 416;
public float confThreshold = 0.35f;
public float nmsThreshold = 0.6f;
public int topK = 1000;
public float SamplingRate = 0.3f;

// Last accepted YOLO detection: [0]=box(x1,y1,x2,y2), [1]=confidence, [2]=class id.
private List<float[]> _voloResult = new List<float[]>();
// Keypoints of the person found by the most recent estimation pass.
private List<Point> _currPersonPoints = new List<Point>();
private YOLOv7ObjectDetector _objectDetector;
// Reusable BGR buffer, allocated in OnWebCamTextureToMatHelperInitialized().
private Mat _bgrMat;
// OpenPose keypoint confidence threshold.
private double threshold = 0.5;
private bool _isOnCamCapture = false;
public bool IsOnCamCapture { get => _isOnCamCapture; internal set => _isOnCamCapture = value; }

public WebCamTextureToMatHelper WebCamTextureToMatHelper => _webCamTextureToMatHelper;
public List<Point> CurrPersonPoints { get => _currPersonPoints; set => _currPersonPoints = value; }
public List<float[]> VoloResult { get => _voloResult; set => _voloResult = value; }
// Latched by EstimateAction, consumed and reset by ScoreUpdate.
private bool _isCorrectAction = false;
// Latest camera frame (cloned in Update()) shared with the estimator.
private Mat _rgbaMat;
public Mat RgbaMat { get => _rgbaMat; set => _rgbaMat = value; }
/// <summary>Subscribes this manager's handlers to the global yoga events.</summary>
private void OnEnable()
{
    var events = EventManager.Instance;
    events.AddEventListener(YogaEventType.StartMotionCapture, OnStartMotionCapture);
    events.AddEventListener(YogaEventType.StopMotionCapture, OnStopMotionCapture);
    events.AddEventListener(YogaEventType.EstimateAction, EstimateAction);
    events.AddEventListener(YogaEventType.ScoreUpdate, ScoreUpdate);
    events.AddEventListener(YogaEventType.GetActionBasePoint, GetActionBasePoint);
}
/// <summary>Unsubscribes every handler registered in OnEnable (prevents leaks).</summary>
private void OnDisable()
{
    var events = EventManager.Instance;
    events.RemoveEventListener(YogaEventType.StartMotionCapture, OnStartMotionCapture);
    events.RemoveEventListener(YogaEventType.StopMotionCapture, OnStopMotionCapture);
    events.RemoveEventListener(YogaEventType.EstimateAction, EstimateAction);
    events.RemoveEventListener(YogaEventType.ScoreUpdate, ScoreUpdate);
    events.RemoveEventListener(YogaEventType.GetActionBasePoint, GetActionBasePoint);
}
/// <summary>
/// One-time setup: wires up the webcam helper, loads yoga data, shows the
/// action-guide video panel, loads the DNN models, then starts the estimator.
/// </summary>
public override void Init()
{
    base.Init();

    _webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    _webCamTextureToMatHelper.Initialize();

    YogaManager.Instance.InitData();

    // Action-guide UI: play the clip configured for the current action.
    VideoClip video = Resources.Load<VideoClip>(YogaManager.Instance.Action.VideoPath);
    UIManager.Instance.Init();
    UIManager.Instance.ShowPanel<ActionGuideVideoPanel>(false, video);

    // Load detector + pose model first; the estimator needs both.
    LoadModels();
    CVEstimator.Instance.Init(_objectDetector, _openPoseModel);
}
/// <summary>
/// Builds the YOLOv7-tiny person detector and the OpenPose keypoints model.
/// Pipeline: YOLO finds the person box, OpenPose estimates the pose inside it.
/// Leaves _net null (and logs) when the OpenPose model file is missing.
/// </summary>
private void LoadModels()
{
    Utils.setDebugMode(true); // route native OpenCV errors to the Unity console
    _net = null;

    _objectDetector = new YOLOv7ObjectDetector(
        Utils.getFilePath("OpenCVForUnity/dnn/yolov7-tiny.weights"),
        Utils.getFilePath("OpenCVForUnity/dnn/yolov7-tiny.cfg"),
        Utils.getFilePath("OpenCVForUnity/dnn/coco.names"),
        new Size(inpWidth, inpHeight), confThreshold, nmsThreshold/*, topK*/);

    string openPosePath = Utils.getFilePath(YogaConfig.MODEL_PATHS[ModelType.OpenPose]);
    if (string.IsNullOrEmpty(openPosePath))
    {
        Debug.LogError("modelFilePath is empty. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
        return;
    }

    _net = Dnn.readNet(openPosePath);
    _openPoseModel = new KeypointsModel(_net);
    _openPoseModel.setInputScale(new Scalar(YogaConfig.InScale));
    _openPoseModel.setInputSize(new Size(YogaConfig.InWidth, YogaConfig.InHeight));
    _openPoseModel.setInputMean(new Scalar(YogaConfig.InMean));
    _openPoseModel.setInputSwapRB(false);
    _openPoseModel.setInputCrop(false);
}
/// <summary>
/// Per-frame loop while capture is active: grabs the camera frame, clones it
/// for the estimator, draws the YOLO debug box and the detected skeleton,
/// then pushes the composited image to the display texture.
/// </summary>
private void Update()
{
    if (!_isOnCamCapture)
    {
        return;
    }

    if (!transform.gameObject.activeSelf)
        transform.gameObject.SetActive(true);

    if (_webCamTextureToMatHelper.IsPlaying() && _webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat img = _webCamTextureToMatHelper.GetMat();
        Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

        // Snapshot for the estimator. NOTE(review): the previous clone is never
        // disposed here; if the estimator no longer holds it, this leaks native
        // memory each frame — confirm ownership before adding a Dispose.
        _rgbaMat = img.clone();

        if (_net == null)
        {
            Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        }

        // FIX: the overlay reads _voloResult[2], so require three entries;
        // the old `>= 2` guard threw IndexOutOfRangeException at exactly 2.
        if (_voloResult.Count >= 3)
            DebugPrintObjectLayout(img, _voloResult[0], _voloResult[1], _voloResult[2]);

        if (_currPersonPoints != null && _currPersonPoints.Count > 0)
        {
            List<Point> points = _currPersonPoints;

            // Draw one line per configured limb pair of the skeleton.
            for (int i = 0; i < YogaConfig.POSE_PAIRS.GetLength(0); i++)
            {
                string partFrom = YogaConfig.POSE_PAIRS[i, 0];
                string partTo = YogaConfig.POSE_PAIRS[i, 1];

                int idFrom = YogaConfig.BODY_PARTS[partFrom];
                int idTo = YogaConfig.BODY_PARTS[partTo];

                Point from = points[idFrom];
                Point to = points[idTo];
                if (from == null || to == null)
                    continue;
                // FIX: compare the (-1,-1) "joint not detected" sentinel by value
                // (same convention as GetEstimationBodyPoints); the original
                // `== new Point(-1, -1)` compared object references and never matched.
                if ((from.x == -1 && from.y == -1) || (to.x == -1 && to.y == -1))
                    continue;

                Imgproc.line(img, from, to, new Scalar(0, 255, 0), 3);
                Imgproc.ellipse(img, from, new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                Imgproc.ellipse(img, to, new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
            }
        }

        Utils.matToTexture2D(img, texture);
    }
}
#region Event Func
/// <summary>Event handler: enables this component and starts capture + estimation.</summary>
private void OnStartMotionCapture()
{
    _isOnCamCapture = true;
    this.enabled = true;
    // Kick off the pose-estimation loop.
    CVEstimator.Instance.StartEstimation();
}
/// <summary>Event handler: halts capture and disables this component.</summary>
private void OnStopMotionCapture()
{
    _isOnCamCapture = false;
    this.enabled = false;
}
/// <summary>
/// Event handler: publishes the most recently captured body keypoints as the
/// baseline points for the current action.
/// </summary>
private void GetActionBasePoint()
{
    // FIX: dropped the `async` modifier — the method awaited nothing (its
    // Task-based retry loop was commented out), so `async void` only produced
    // compiler warning CS1998 and made any exception unobservable.
    YogaManager.Instance.Points = _currPersonPoints;
}
/// <summary>
/// Event handler: checks whether the current keypoints match the requested
/// action and latches a success flag until the next ScoreUpdate resets it.
/// </summary>
/// <param name="args">args[0] must carry the AvatarAction to test against.</param>
private void EstimateAction(params object[] args)
{
    // FIX: dropped the `async` modifier — no awaits remained (the Task-based
    // loop was commented out), and `async void` swallows exceptions (CS1998).
    var type = args.FirstOrDefault();
    if (type == null)
    {
        Debug.LogError("EstimateAction type is null");
        return;
    }
    // Reuse the value fetched above instead of a second FirstOrDefault() call.
    AvatarAction actionType = (AvatarAction)type;

    // Latch: once any frame matches the action, stay true until scored.
    _isCorrectAction = _isCorrectAction || YogaManager.Instance.IsCorrectAction(_currPersonPoints, actionType);
}
#endregion
/// <summary>
/// Runs YOLOv7 person detection on the latest camera frame and, when a person
/// is found, runs OpenPose on the person ROI. Returns the keypoints translated
/// back to full-image coordinates, or null when no usable person/ROI exists.
/// Undetected joints keep the (-1,-1) sentinel produced by the model.
/// </summary>
private List<Point> GetEstimationBodyPoints()
{
    Mat rgbaMat = _rgbaMat;
    if (rgbaMat == null)
    {
        Debug.LogWarning("WebCamTexture is null. ");
        return null;
    }

    if (_objectDetector == null)
    {
        Debug.LogWarning("ObjectDetector is not ready. ");
        return null;
    }

    // FIX: bgrMat is now allocated after the guard clauses and released on
    // every exit path via try/finally — the original leaked the native Mat on
    // each of its four early returns.
    Mat bgrMat = new Mat();
    try
    {
        Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);
        // NOTE(review): ownership of `results` is unclear — if infer() allocates
        // a new Mat per call this leaks; confirm against YOLOv7ObjectDetector.
        Mat results = _objectDetector.infer(bgrMat);

        var voloResultBox = new List<float[]>();
        bool hasValidObject = false;
        // Scan detections from the last row backwards; keep the first valid person.
        for (int i = results.rows() - 1; i >= 0; --i)
        {
            float[] box = new float[4];   // x1, y1, x2, y2
            results.get(i, 0, box);
            float[] conf = new float[1];  // confidence
            results.get(i, 4, conf);
            float[] cls = new float[1];   // class id
            results.get(i, 5, cls);

            if (!IsObjectValid(box, conf, cls, rgbaMat))
            {
                continue;
            }
            hasValidObject = true;
            voloResultBox.Clear();
            voloResultBox.Add(box);
            voloResultBox.Add(conf);
            voloResultBox.Add(cls);
            break;
        }

        if (!hasValidObject) // no person detected
            return null;

        // Publish the detection for the debug overlay drawn in Update().
        _voloResult = voloResultBox;

        OpenCVForUnity.CoreModule.Rect roiRect = new OpenCVForUnity.CoreModule.Rect(
            (int)voloResultBox[0][0],
            (int)voloResultBox[0][1],
            Math.Abs((int)(voloResultBox[0][2] - voloResultBox[0][0])),
            Math.Abs((int)(voloResultBox[0][3] - voloResultBox[0][1])));
        // Reject ROIs whose row range falls outside the frame (submat would throw):
        // 0 <= start, start <= end, end <= rows.
        if (roiRect.y < 0 ||
            (roiRect.y + roiRect.height) < roiRect.y ||
            bgrMat.rows() < (roiRect.y + roiRect.height))
            return null;

        List<Point> points = null;
        try
        {
            Mat personRectImg = new Mat(bgrMat, roiRect); // ROI view into bgrMat
            points = _openPoseModel.estimate(personRectImg, (float)threshold).toList();
            // Translate ROI-relative keypoints back to full-image coordinates.
            for (int j = 0; j < points.Count; j++)
            {
                if (points[j] == null ||
                    (points[j].x == -1 && points[j].y == -1)) // joint not found, skip
                    continue;

                points[j].x += roiRect.x;
                points[j].y += roiRect.y;
            }
        }
        catch (Exception e)
        {
            Debug.LogException(e);
            Debug.LogWarning("bgrMat:" + bgrMat.rows());
            Debug.LogError("rect:" + roiRect);
        }
        return points;
    }
    finally
    {
        bgrMat.Dispose();
    }
}
/// <summary>
/// Accepts a YOLO detection only when it is a person (COCO class 0) with
/// confidence of at least 0.8.
/// </summary>
/// <param name="box">Bounding box as x1, y1, x2, y2.</param>
/// <param name="confidence">Single-element detection confidence.</param>
/// <param name="classID">Single-element COCO class id.</param>
/// <param name="rgbaMat">Source frame; reserved for positional filtering.</param>
private bool IsObjectValid(float[] box, float[] confidence, float[] classID, Mat rgbaMat)
{
    if ((int)classID[0] != 0 || confidence[0] < 0.8f) // person only, >= 80% confidence
        return false;

    // FIX: removed unused locals (frame width, box centre X). The original
    // comments suggest a driver/passenger-side position filter was planned
    // but never implemented — TODO(review): implement it or drop rgbaMat.
    return true;
}
/// <summary>
/// Draws one detection on the image: a bounding box plus a filled
/// "class confidence" label. Palette colors are flipped from RGB order to BGR
/// unless isRGB is set.
/// </summary>
private void DebugPrintObjectLayout(Mat image, float[] box, float[] conf, float[] cls, bool isRGB = false)
{
    float left = box[0];
    float top = box[1];
    float right = box[2];
    float bottom = box[3];
    int classId = (int)cls[0];

    Scalar c = _objectDetector.palette[classId % _objectDetector.palette.Count];
    Scalar color = isRGB ? c : new Scalar(c.val[2], c.val[1], c.val[0], c.val[3]);
    Imgproc.rectangle(image, new Point(left, top), new Point(right, bottom), color, 2);

    // Label text: "<class name> <confidence>" when names are available.
    string label = String.Format("{0:0.00}", conf[0]);
    if (_objectDetector.classNames != null
        && _objectDetector.classNames.Count != 0
        && classId < _objectDetector.classNames.Count)
    {
        label = _objectDetector.classNames[classId] + " " + label;
    }

    // Filled background sized to the text, then the text itself.
    int[] baseLine = new int[1];
    Size labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);
    top = Mathf.Max(top, (float)labelSize.height);
    Imgproc.rectangle(image, new Point(left, top - labelSize.height),
        new Point(left + labelSize.width, top + baseLine[0]), color, Core.FILLED);
    Imgproc.putText(image, label, new Point(left, top), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, Scalar.all(255), 1, Imgproc.LINE_AA);
}
/// <summary>
/// Raises the destroy event: releases the camera helper, the DNN, and the
/// native Mat buffers, then shuts down the estimator.
/// </summary>
void OnDestroy()
{
    _webCamTextureToMatHelper.Dispose();
    if (_net != null)
        _net.Dispose();
    // FIX: null-guard before disposing — _bgrMat is only allocated in
    // OnWebCamTextureToMatHelperInitialized, so destroying this object before
    // the helper initialized threw NullReferenceException here.
    if (_bgrMat != null)
        _bgrMat.Dispose();
    // FIX: also release the last cloned camera frame held in _rgbaMat.
    if (_rgbaMat != null)
        _rgbaMat.Dispose();
    _models.Clear();
    CVEstimator.Instance.Dispose();
}
/// <summary>
/// Callback from WebCamTextureToMatHelper once the camera is ready: allocates
/// the display texture, scales this quad to the frame, fits the orthographic
/// camera, and allocates the reusable BGR buffer.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat webCamTextureMat = _webCamTextureToMatHelper.GetMat();
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGB24, false);
    Utils.matToTexture2D(webCamTextureMat, texture);
    this.gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    // NOTE(review): integer division — fractional scale is truncated; confirm intended.
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols() / 10, webCamTextureMat.rows() / 10, 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();
    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    // Fit the orthographic camera on whichever axis limits the frame.
    Camera.main.orthographicSize = widthScale < heightScale
        ? (width * (float)Screen.height / (float)Screen.width) / 2
        : height / 2;

    _bgrMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
}
//event call
/// <summary>
/// Callback when the webcam helper shuts down: releases the BGR buffer and
/// destroys the display texture.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
    Debug.Log("OnWebCamTextureToMatHelperDisposed");

    _bgrMat?.Dispose();

    if (texture != null)
    {
        Texture2D.Destroy(texture);
        texture = null;
    }
}
/// <summary>
/// Event handler: dispatches ActionSuccess when any frame since the last
/// reset matched the expected action (ActionFailed otherwise), then clears
/// the latch for the next scoring window.
/// </summary>
public void ScoreUpdate()
{
    var verdict = _isCorrectAction ? YogaEventType.ActionSuccess : YogaEventType.ActionFailed;
    EventManager.Instance.Dispatch(verdict);
    _isCorrectAction = false; // reset the latch
}
}
}