// Health/Assets/Scripts/CVEstimator.cs
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.DnnModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using System;
using System.Collections.Generic;
using System.Threading;
using UnityEngine;
using Yoga;
/// <summary>
/// Background pose estimator: runs YOLOv7 person detection on camera frames,
/// crops the detected person region, and runs OpenPose keypoint estimation on
/// it, publishing results to <c>MotionCaptureManager</c> at ~30 FPS.
/// </summary>
public class CVEstimator : Singleton<CVEstimator>
{
    private bool isInited = false;

    /// <summary>Whether <see cref="Init"/> has completed successfully.</summary>
    public bool IsInited { get => isInited; private set => isInited = value; }

    /// <summary>Whether the estimation loop is currently running.</summary>
    public bool IsRunning { get; private set; }

    private Thread _mainThread;
    private Thread _estimateThread;

    // Target frame interval (~30 FPS).
    // BUG FIX: "1 / 30" was integer division (== 0), which disabled throttling
    // entirely; use floating-point division so the interval is 33.3 ms.
    private static readonly TimeSpan _refreshRate = TimeSpan.FromSeconds(1.0 / 30.0);

    private YOLOv7ObjectDetector _objectDetector;

    public int inpWidth = 416;
    public int inpHeight = 416;
    public float confThreshold = 0.35f;
    public float nmsThreshold = 0.6f;
    public int topK = 1000;
    private double threshold = 0.5; // OpenPose keypoint confidence threshold
    private Net _net;
    private KeypointsModel _openPoseModel;

    /// <summary>
    /// Initializes the estimator. Detection pipeline: first locate the person
    /// with YOLOv7, then run OpenPose on the cropped region.
    /// </summary>
    /// <param name="args">[0]: <c>YOLOv7ObjectDetector</c>; [1]: <c>KeypointsModel</c>.</param>
    /// <exception cref="ArgumentException">Missing or wrongly-typed arguments.</exception>
    public void Init(params object[] args)
    {
        if (IsInited)
        {
            return;
        }
        if (_mainThread != null)
        {
            _mainThread.Abort();
            _mainThread = null;
        }
        if (args.Length < 2)
            throw new ArgumentException("CVEstimator.Init: args.Length < 2");
        _objectDetector = args[0] as YOLOv7ObjectDetector;
        if (_objectDetector == null)
            throw new ArgumentException("CVEstimator.Init: args[0] is not YOLOv7ObjectDetector");
        _openPoseModel = args[1] as KeypointsModel;
        if (_openPoseModel == null)
            throw new ArgumentException("CVEstimator.Init: args[1] is not KeypointsModel");
        _mainThread = new Thread(new ThreadStart(MainThread));
        _mainThread.Start();
        IsInited = true;
    }

    /// <summary>Starts the background estimation loop. Idempotent.</summary>
    public void StartEstimation()
    {
        if (!IsInited)
        {
            Debug.LogError("CVEstimator is not inited. ");
            return;
        }
        // Guard BEFORE starting the thread: the original started the thread
        // first, so a second call would throw ThreadStateException.
        if (IsRunning)
        {
            return;
        }
        // _estimateThread is created asynchronously by MainThread(); it may
        // not exist yet if StartEstimation is called immediately after Init.
        if (_estimateThread == null)
        {
            Debug.LogError("CVEstimator: estimation thread is not ready yet. ");
            return;
        }
        // BUG FIX: set the flag BEFORE Start() — Estimation() loops on
        // while (IsRunning), so starting the thread first could let the
        // loop observe IsRunning == false and exit immediately.
        IsRunning = true;
        _estimateThread.Start();
    }

    /// <summary>Stops the loop and tears down both worker threads.</summary>
    public void Dispose()
    {
        if (!IsInited)
        {
            return;
        }
        IsRunning = false;
        // Null-conditional: either thread may never have been created.
        _estimateThread?.Abort();
        _mainThread?.Abort();
        _estimateThread = null;
        _mainThread = null;
        IsInited = false;
    }

    /// <summary>Model-loading thread: enables OpenCV debug logging and prepares
    /// (but does not start) the estimation worker thread.</summary>
    private void MainThread()
    {
        // Load the model; enable OpenCV debug logging.
        Utils.setDebugMode(true);
        // Parse data / prepare the worker. Started later by StartEstimation().
        _estimateThread = new Thread(new ThreadStart(Estimation));
    }

    /// <summary>
    /// Worker loop: frame grab -> YOLOv7 person box -> ROI crop -> OpenPose
    /// keypoints -> publish, throttled to <see cref="_refreshRate"/>.
    /// </summary>
    private void Estimation()
    {
        DateTime startTime = DateTime.Now;
        Mat bgrMat = new Mat();
        while (IsRunning)
        {
            if (_objectDetector == null)
            {
                Debug.LogWarning("ObjectDetector is not ready. ");
                IsRunning = false; // stop detection
                // BUG FIX: the original fell through and dereferenced the
                // null detector below in the same iteration.
                break;
            }
            // Grab the latest camera frame.
            Mat rgbaMat = MotionCaptureManager.Instance.RgbaMat;
            if (rgbaMat == null)
            {
                Debug.Log("WebCamTexture is null. ");
                Debug.Log("Re-Estimation.");
                continue; // retry
            }
            Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);
            Mat results = _objectDetector.infer(bgrMat);
            var voloResultBox = new List<float[]>();
            bool hasValidObject = false;
            // Scan detections from last to first; keep the first valid person.
            for (int i = results.rows() - 1; i >= 0; --i)
            {
                float[] box = new float[4];
                results.get(i, 0, box); // bounding box
                float[] conf = new float[1];
                results.get(i, 4, conf); // confidence
                float[] cls = new float[1];
                results.get(i, 5, cls); // class id
                if (!IsObjectValid(box, conf, cls, rgbaMat))
                {
                    continue;
                }
                hasValidObject = true;
                voloResultBox.Clear();
                voloResultBox.Add(box);
                voloResultBox.Add(conf);
                voloResultBox.Add(cls);
                break;
            }
            // LEAK FIX: results is a native Mat; release it each frame.
            results.Dispose();
            if (!hasValidObject) // no person detected
            {
                Debug.Log("No person block found. Re-Estimation.");
                continue; // retry
            }
            // Publish the person-box detection result.
            MotionCaptureManager.Instance.VoloResult = voloResultBox;
            OpenCVForUnity.CoreModule.Rect roiRect = new OpenCVForUnity.CoreModule.Rect(
                (int)voloResultBox[0][0],
                (int)voloResultBox[0][1],
                Math.Abs((int)(voloResultBox[0][2] - voloResultBox[0][0])),
                Math.Abs((int)(voloResultBox[0][3] - voloResultBox[0][1])));
            // Validate the ROI against the frame. BUG FIX: the original only
            // checked the row range; an out-of-range column range would throw
            // in the Mat ROI constructor below.
            if (roiRect.y < 0 || // 0 <= rowRange.start
                (roiRect.y + roiRect.height) < roiRect.y || // start <= end
                bgrMat.rows() < (roiRect.y + roiRect.height) || // end <= rows
                roiRect.x < 0 ||
                (roiRect.x + roiRect.width) < roiRect.x ||
                bgrMat.cols() < (roiRect.x + roiRect.width))
                continue; // retry
            Mat personRectImg = new Mat(bgrMat, roiRect); // person ROI (shares bgrMat data)
            List<Point> points = _openPoseModel.estimate(personRectImg, (float)threshold).toList();
            // LEAK FIX: release the ROI header once keypoints are extracted.
            personRectImg.Dispose();
            // Map ROI-local keypoints back to full-frame coordinates.
            for (int j = 0; j < points.Count; j++)
            {
                if (points[j] == null ||
                    (points[j].x == -1 && points[j].y == -1)) // undetected point, skip
                    continue;
                points[j].x += roiRect.x;
                points[j].y += roiRect.y;
            }
            if (!YogaManager.Instance.ActionCheckPoints(points))
            {
                Debug.Log("ActionCheckPoints failed. Re-Estimation.");
                continue; // retry
            }
            // Publish the keypoint detection result.
            MotionCaptureManager.Instance.CurrPersonPoints = points;
            // Throttle: wait until the next update period.
            if (DateTime.Now - startTime > _refreshRate)
            {
                startTime = DateTime.Now;
            }
            else
            {
                Thread.Sleep(_refreshRate - (DateTime.Now - startTime));
            }
        }
        bgrMat.Dispose();
    }

    /// <summary>
    /// Filters a raw detection: accepts only class 0 (person) with
    /// confidence >= 0.8.
    /// </summary>
    /// <param name="box">Bounding box [x1, y1, x2, y2].</param>
    /// <param name="confidence">Detection confidence at index 0.</param>
    /// <param name="classID">Class id at index 0 (0 == person).</param>
    /// <param name="rgbaMat">Source frame (used for frame dimensions).</param>
    private bool IsObjectValid(float[] box, float[] confidence, float[] classID, Mat rgbaMat)
    {
        if ((int)classID[0] != 0 || confidence[0] < 0.8f) // person only, confidence > 80%
            return false;
        // Driver/passenger seat position filter — NOT implemented yet; the
        // values below are computed but unused (left = passenger, right = driver).
        // TODO(review): finish or remove this position check.
        float width = rgbaMat.width();
        float centerX = (box[0] + box[2]) / 2; // box horizontal midpoint
        return true;
    }
}