// File: Health/Assets/Scripts/PoseCheck/MotionCaptureManager.cs
// (web-scrape page header removed; original listing dated 2023-11-07)
#undef DEBUG_MODE
#define DEBUG_MODE
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.DnnModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVForUnityExample;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.Video;
namespace Yoga
{
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class MotionCaptureManager : MonoSingleton<MotionCaptureManager>
{
// On-screen FPS overlay helper (optional; may be absent on the GameObject).
private FpsMonitor _fpsMonitor;
// Bridges the device webcam to OpenCV Mats.
private WebCamTextureToMatHelper _webCamTextureToMatHelper;
// NOTE(review): never read or written in this file — confirm before removing.
private bool isInited = false;
// Texture the camera frame (plus debug overlays) is blitted into each frame.
private Texture2D texture;
// OpenPose DNN; stays null if the model file could not be located.
private Net _net;
// Cache of loaded keypoint models, cleared in OnDestroy.
private Dictionary<ModelType, KeypointsModel> _models = new Dictionary<ModelType, KeypointsModel>();
private KeypointsModel _openPoseModel;
// NOTE(review): these three constants are not referenced in this file;
// the detector below loads yolov7-tiny files by literal path instead.
private const string model = "nanodet-plus-m_416.onnx";
private const string config = "";
private const string classes = "coco.names";
// Network input size for the YOLOv7 person detector.
public int inpWidth = 416;
public int inpHeight = 416;
// Detector confidence / non-max-suppression thresholds.
public float confThreshold = 0.35f;
public float nmsThreshold = 0.6f;
public int topK = 1000;
// Minimum seconds between detector runs (throttle, see Update()).
public float SamplingRate = 0.3f;
// Last accepted detection as [box, confidence, classId] float arrays.
private List<float[]> _voloResult = new List<float[]>();
// Last estimated skeleton keypoints, in full-image coordinates.
private List<Point> _personPoints = new List<Point>();
private YOLOv7ObjectDetector _objectDetector;
// Scratch BGR frame the detector and pose model read from.
private Mat _bgrMat;
// OpenPose keypoint acceptance threshold.
private double threshold = 0.5;
// Seconds accumulated since the detector last ran.
private float _involkCDTime = 0;
private Coroutine _objEstimation;
private Coroutine _actionEstimation;
// Master switch: when false, Update() does nothing.
private bool _isOnCamCapture = false;
public bool IsOnCamCapture { get => _isOnCamCapture; internal set => _isOnCamCapture = value; }
private void OnEnable()
{
    // Subscribe to gameplay events; mirrored exactly by OnDisable so no
    // handlers leak when this component is toggled.
    // (FIX: removed scrape-artifact timestamp lines that broke compilation.)
    EventManager.Instance.AddEventListener("StartMotionCapture", OnStartMotionCapture);
    EventManager.Instance.AddEventListener("EstimateAction", EstimateAction);
    EventManager.Instance.AddEventListener("ScoreUpdate", ScoreUpdate);
}
private void OnDisable()
{
    // Tear down every subscription made in OnEnable (removal order is
    // irrelevant — the three events are independent).
    EventManager.Instance.RemoveEventListener("ScoreUpdate", ScoreUpdate);
    EventManager.Instance.RemoveEventListener("EstimateAction", EstimateAction);
    EventManager.Instance.RemoveEventListener("StartMotionCapture", OnStartMotionCapture);
}
private void EstimateAction()
{
    // Handler for the "EstimateAction" event — currently an empty stub.
    // (Original comment: "检测动作" = "detect the action".)
}
// Handler for the "StartMotionCapture" event: switches the component back
// on and starts consuming webcam frames in Update().
private void OnStartMotionCapture()
{
    _isOnCamCapture = true;
    enabled = true;
}
/// <summary>
/// One-time setup: wires the webcam helper, initializes session data and the
/// action-guide UI, then loads the DNN models.
/// (FIX: removed scrape-artifact timestamp lines that broke compilation.)
/// </summary>
public override void Init()
{
    base.Init();
    _fpsMonitor = GetComponent<FpsMonitor>();
    _webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    _webCamTextureToMatHelper.Initialize();

    YogaManager.Instance.InitData();

    // Action-guide screen: play the instruction clip for the first pose.
    var video = Resources.Load<VideoClip>("Video/Action1");
    UIManager.Instance.Init();
    UIManager.Instance.ShowPanel<ActionGuideVideoPanel>(false, video);

    LoadModels(); // YOLOv7 person detector + OpenPose keypoint model
}
/// <summary>
/// Loads the two-stage pipeline: YOLOv7-tiny finds the person rectangle,
/// OpenPose then estimates keypoints inside it. On a missing model file the
/// method logs and returns, leaving <c>_net</c> null (Update() shows a hint).
/// </summary>
private void LoadModels()
{
    // FIX: the original called setDebugMode(true) immediately followed by
    // setDebugMode(false); only the last call takes effect, so keep that one.
    Utils.setDebugMode(false);

    _net = null; // explicit: stays null if the OpenPose model is missing

    _objectDetector = new YOLOv7ObjectDetector(
        Utils.getFilePath("OpenCVForUnity/dnn/yolov7-tiny.weights"),
        Utils.getFilePath("OpenCVForUnity/dnn/yolov7-tiny.cfg"),
        Utils.getFilePath("OpenCVForUnity/dnn/coco.names"),
        new Size(inpWidth, inpHeight), confThreshold, nmsThreshold/*, topK*/);

    var modelFilePath = Utils.getFilePath(YogaConfig.MODEL_PATHS[ModelType.OpenPose]);
    if (string.IsNullOrEmpty(modelFilePath))
    {
        Debug.LogError("modelFilePath is empty. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
        return;
    }

    _net = Dnn.readNet(modelFilePath);
    _openPoseModel = new KeypointsModel(_net);
    // Input preprocessing parameters come from YogaConfig; RGB order is kept
    // (no swap) and no center-crop is applied.
    _openPoseModel.setInputScale(new Scalar(YogaConfig.InScale));
    _openPoseModel.setInputSize(new Size(YogaConfig.InWidth, YogaConfig.InHeight));
    _openPoseModel.setInputMean(new Scalar(YogaConfig.InMean));
    _openPoseModel.setInputSwapRB(false);
    _openPoseModel.setInputCrop(false);
}
/// <summary>
/// Per-frame loop: grabs the webcam frame, throttles the detector coroutine
/// to one run per <c>SamplingRate</c> seconds, overlays the last detection
/// box and skeleton, and blits the result to <c>texture</c>.
/// </summary>
private void Update()
{
    if (!_isOnCamCapture)
        return;

    if (!transform.gameObject.activeSelf)
        transform.gameObject.SetActive(true);

    if (_webCamTextureToMatHelper.IsPlaying() && _webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat img = _webCamTextureToMatHelper.GetMat();
        Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

        // Throttle the expensive detection pass.
        _involkCDTime += Time.deltaTime;
        if (_involkCDTime > SamplingRate)
        {
            if (_objEstimation != null)
                StopCoroutine(_objEstimation);
            _objEstimation = StartCoroutine(ObjectEstimation(img));
            _involkCDTime = 0;
        }

        // Debug overlay section (was wrapped in a commented-out
        // "#if DEBUG_MODE && UNITY_EDITOR" in the original; kept always-on
        // to preserve behavior).
        if (_net == null)
        {
            Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        }

        // FIX: _voloResult is empty until the first successful detection;
        // indexing [0..2] unconditionally threw ArgumentOutOfRangeException
        // on early frames.
        if (_voloResult.Count >= 3)
            DebugPrintObjectLayout(img, _voloResult[0], _voloResult[1], _voloResult[2]);

        // Draw the skeleton from the last pose estimate.
        List<Point> points = _personPoints;
        for (int i = 0; i < YogaConfig.POSE_PAIRS.GetLength(0); i++)
        {
            string partFrom = YogaConfig.POSE_PAIRS[i, 0];
            string partTo = YogaConfig.POSE_PAIRS[i, 1];
            int idFrom = YogaConfig.BODY_PARTS[partFrom];
            int idTo = YogaConfig.BODY_PARTS[partTo];

            // FIX: guard against an empty/short keypoint list before the
            // first estimate has completed.
            if (idFrom >= points.Count || idTo >= points.Count)
                continue;
            if (points[idFrom] == new Point(-1, -1) || points[idTo] == new Point(-1, -1))
                continue;
            if (points[idFrom] != null && points[idTo] != null)
            {
                Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
                Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
            }
        }

        Utils.matToTexture2D(img, texture);
    }
}
/// <summary>
/// Runs the YOLOv7 detector on the current frame, stores the first detection
/// that passes <see cref="CheckResults"/> into <c>_voloResult</c> as
/// [box, confidence, classId], then kicks off pose estimation.
/// </summary>
private IEnumerator ObjectEstimation(Mat rgbaMat)
{
    if (!_webCamTextureToMatHelper.IsPlaying() || !_webCamTextureToMatHelper.DidUpdateThisFrame())
        yield break;
    if (_objectDetector == null)
        yield break;

    Imgproc.cvtColor(rgbaMat, _bgrMat, Imgproc.COLOR_RGBA2BGR);
    Mat results = _objectDetector.infer(_bgrMat);

    // Scan detections from the last row upward and keep the first acceptable one.
    for (int i = results.rows() - 1; i >= 0; --i)
    {
        float[] box = new float[4];
        results.get(i, 0, box);  // bounding box [left, top, right, bottom]
        float[] conf = new float[1];
        results.get(i, 4, conf); // confidence
        float[] cls = new float[1];
        results.get(i, 5, cls);  // class id

        // FIX: removed unused local `rectResult` (built but never read).
        if (CheckResults(box, conf, cls, rgbaMat))
        {
            _voloResult.Clear();
            _voloResult.Add(box);
            _voloResult.Add(conf);
            _voloResult.Add(cls);
            break;
        }
    }

    if (_actionEstimation != null)
        StopCoroutine(_actionEstimation);
    _actionEstimation = StartCoroutine(ActionEstimation(rgbaMat));
}
/// <summary>
/// Crops the detected person out of <c>_bgrMat</c>, runs OpenPose on the
/// crop, and stores the keypoints (translated back to full-image
/// coordinates) in <c>_personPoints</c>.
/// </summary>
private IEnumerator ActionEstimation(Mat rgbaMat)
{
    if (!_webCamTextureToMatHelper.IsPlaying() || !_webCamTextureToMatHelper.DidUpdateThisFrame())
        yield break;

    // FIX: _voloResult is an initialized list, so the original `== null`
    // check could never fire; guard against "no detection yet" instead.
    if (_voloResult == null || _voloResult.Count == 0)
        yield break;

    var box = _voloResult[0]; // [left, top, right, bottom]

    // Person ROI in _bgrMat coordinates.
    OpenCVForUnity.CoreModule.Rect rect = new OpenCVForUnity.CoreModule.Rect((int)box[0], (int)box[1], (int)(box[2] - box[0]), (int)(box[3] - box[1]));

    // ROI view into _bgrMat; disposed after use (FIX: was never released).
    Mat personRectImg = new Mat(_bgrMat, rect);
    _personPoints = _openPoseModel.estimate(personRectImg, (float)threshold).toList();
    personRectImg.Dispose();

    // Map ROI-local keypoints back into full-image coordinates.
    for (int j = 0; j < _personPoints.Count; j++)
    {
        if (_personPoints[j] == null ||
            (_personPoints[j].x == -1 && _personPoints[j].y == -1)) // joint not found
            continue;
        _personPoints[j].x += rect.x;
        _personPoints[j].y += rect.y;
    }

    // Action classification hook (disabled in the original):
    //YogaManager.Instance.ProcessPoints(_personPoints);
}
/// <summary>
/// Accepts a detection only if it is a person (COCO class 0) with
/// confidence of at least 0.8.
/// </summary>
/// <param name="box">Bounding box [left, top, right, bottom].</param>
/// <param name="confidence">Single-element confidence array.</param>
/// <param name="classID">Single-element class-id array.</param>
/// <param name="rgbaMat">Source frame (reserved for a positional filter).</param>
private bool CheckResults(float[] box, float[] confidence, float[] classID, Mat rgbaMat)
{
    if ((int)classID[0] != 0 || confidence[0] < 0.8f) // person only, >= 80%
        return false;

    // TODO(review): the original computed the frame width and the box center
    // for a never-finished position check ("left = passenger, right =
    // driver" — likely leftover from another project). Dead locals removed;
    // re-add the check here if it is still wanted.
    return true;
}
/// <summary>
/// Draws one detection (box, class label, confidence) onto <paramref name="image"/>.
/// </summary>
/// <param name="box">Bounding box [left, top, right, bottom].</param>
/// <param name="conf">Single-element confidence array.</param>
/// <param name="cls">Single-element class-id array.</param>
/// <param name="isRGB">True if the image is RGB; otherwise the palette color is BGR-swapped.</param>
private void DebugPrintObjectLayout(Mat image, float[] box, float[] conf, float[] cls, bool isRGB = false)
{
    float left = box[0];
    float top = box[1];
    float right = box[2];
    float bottom = box[3];
    int classId = (int)cls[0];

    // Palette is BGR by default; swap channels unless the caller says RGB.
    Scalar c = _objectDetector.palette[classId % _objectDetector.palette.Count];
    Scalar color = isRGB ? c : new Scalar(c.val[2], c.val[1], c.val[0], c.val[3]);
    Imgproc.rectangle(image, new Point(left, top), new Point(right, bottom), color, 2);

    string label = String.Format("{0:0.00}", conf[0]);
    if (_objectDetector.classNames != null && _objectDetector.classNames.Count != 0)
    {
        // FIX: dropped redundant (int) cast on Count (already an int).
        if (classId < _objectDetector.classNames.Count)
        {
            label = _objectDetector.classNames[classId] + " " + label;
        }
    }

    // Filled background behind the label so it stays readable.
    int[] baseLine = new int[1];
    Size labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);
    top = Mathf.Max((float)top, (float)labelSize.height); // keep label on-screen
    Imgproc.rectangle(image, new Point(left, top - labelSize.height),
        new Point(left + labelSize.width, top + baseLine[0]), color, Core.FILLED);
    Imgproc.putText(image, label, new Point(left, top), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, Scalar.all(255), 1, Imgproc.LINE_AA);
}
/// <summary>
/// Raises the destroy event; releases native (OpenCV) resources.
/// </summary>
void OnDestroy()
{
    // FIX: guard each handle — Init() may not have completed (e.g. missing
    // model file), and _bgrMat may already have been disposed by
    // OnWebCamTextureToMatHelperDisposed.
    if (_webCamTextureToMatHelper != null)
        _webCamTextureToMatHelper.Dispose();
    if (_net != null)
        _net.Dispose();
    if (_bgrMat != null)
        _bgrMat.Dispose();

    // Drop references to any loaded keypoint models.
    _models.Clear();
    //YogaManager.Instance.Dispose();
}
// Event callback: the webcam helper finished initializing. Creates the
// display texture, sizes the quad and orthographic camera to the feed's
// aspect ratio, and allocates the BGR scratch frame.
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    Mat camMat = _webCamTextureToMatHelper.GetMat();

    // Texture the quad this component sits on with the camera feed.
    texture = new Texture2D(camMat.cols(), camMat.rows(), TextureFormat.RGB24, false);
    Utils.matToTexture2D(camMat, texture);
    GetComponent<Renderer>().material.mainTexture = texture;
    // Integer division by 10 — reproduces the original scaling exactly.
    gameObject.transform.localScale = new Vector3(camMat.cols() / 10, camMat.rows() / 10, 1);

    Debug.Log($"Screen.width {Screen.width} Screen.height {Screen.height} Screen.orientation {Screen.orientation}");

    if (_fpsMonitor != null)
    {
        _fpsMonitor.Add("width", _webCamTextureToMatHelper.GetWidth().ToString());
        _fpsMonitor.Add("height", _webCamTextureToMatHelper.GetHeight().ToString());
        _fpsMonitor.Add("orientation", Screen.orientation.ToString());
    }

    // Fit the orthographic camera so the whole feed is visible.
    float feedW = camMat.width();
    float feedH = camMat.height();
    bool widthBound = ((float)Screen.width / feedW) < ((float)Screen.height / feedH);
    Camera.main.orthographicSize = widthBound
        ? (feedW * (float)Screen.height / (float)Screen.width) / 2
        : feedH / 2;

    // BGR working copy used by the detector / pose pipeline.
    _bgrMat = new Mat(camMat.rows(), camMat.cols(), CvType.CV_8UC3);
}
// Event callback: the webcam helper was disposed — release the per-session
// scratch Mat and the display texture.
public void OnWebCamTextureToMatHelperDisposed()
{
    Debug.Log("OnWebCamTextureToMatHelperDisposed");

    _bgrMat?.Dispose();

    if (texture != null)
    {
        Texture2D.Destroy(texture);
        texture = null;
    }
}
// Whether the latest pose matched the target action.
// NOTE(review): nothing in this file sets it to true — confirm it is
// written elsewhere (e.g. via the property or another manager).
// (FIX: removed scrape-artifact timestamp lines that broke compilation.)
private bool _isCorrectAction = false;

// Repetitions per action before advancing to the settlement screen.
private int _maxActionCount = 3;
/// <summary>
/// Handler for the "ScoreUpdate" event: tallies the current repetition,
/// dispatches success/failure feedback, and once <c>_maxActionCount</c>
/// repetitions are reached stops capture and shows the settlement panel.
/// (FIX: removed scrape-artifact timestamp lines that broke compilation.)
/// </summary>
public void ScoreUpdate()
{
    // Reached the repetition cap: stop detection and move on
    // (next action / next action guide / reward screen).
    if (YogaManager.Instance.CurrentActionCount >= _maxActionCount)
    {
        _isOnCamCapture = false;
        UIManager.Instance.ShowPanel<ClearingSettlementUI>(false, null);
        return;
    }

    if (_isCorrectAction)
    {
        EventManager.Instance.Dispatch("ActionSuccess");
        YogaManager.Instance.CurrentSuccessActionCount++;
    }
    else
    {
        EventManager.Instance.Dispatch("ActionFailed");
    }

    YogaManager.Instance.CurrentActionCount++;
}
}
}