Health/Assets/Scripts/PoseCheck/EstimateModel/CVEstimator.cs

using OpenCVForUnity.CoreModule;
using OpenCVForUnity.DnnModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using System;
using System.Collections.Generic;
using System.Threading;
using UnityEngine;
using Yoga;
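/// <summary>
/// Runs OpenCV-based human pose estimation on a background thread and publishes
/// the detected keypoints to MotionCaptureManager.
/// </summary>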
public class CVEstimator : Singleton<CVEstimator>
{
    private bool isInited = false;
    public bool IsInited { get => isInited; private set => isInited = value; }
    public bool IsRunning { get; private set; }

    private Thread _mainThread;
    private Thread _estimateThread;

    // Target interval between estimations (~30 per second).
    // Use 1.0 / 30 here: the integer expression 1 / 30 truncates to zero.
    private static readonly TimeSpan _refreshRate = TimeSpan.FromSeconds(1.0 / 30);

    //public int inpWidth = 416;
    //public int inpHeight = 416;
    //public float confThreshold = 0.35f;
    //public float nmsThreshold = 0.6f;
    //public int topK = 1000;
    //private double threshold = 0.5;

    // OpenCV DNN network and OpenPose keypoints model.
    private Net _net;
    private KeypointsModel _openPoseModel;
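    /// <summary>
    /// Spawns the main worker thread. Further calls are ignored until Dispose
    /// resets the estimator.
    /// </summary>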
    public void Init(params object[] args)
    {
        if (IsInited)
        {
            return;
        }
        if (_mainThread != null)
        {
            _mainThread.Abort();
            _mainThread = null;
        }

        // First detect the person with YOLOv7, then estimate the pose with OpenPose.
        //if (args.Length < 1)
        //    throw new ArgumentException("CVEstimator.Init: args.Length < 2");
        //_objectDetector = args[0] as YOLOv7ObjectDetector;
        //if (_objectDetector == null)
        //    throw new ArgumentException("CVEstimator.Init: args[0] is not YOLOv7ObjectDetector");
        //_openPoseModel = args[1] as KeypointsModel;
        //if (_openPoseModel == null)
        //    throw new ArgumentException("CVEstimator.Init: args[1] is not KeypointsModel");

        _mainThread = new Thread(new ThreadStart(MainThread));
        _mainThread.Start();
        IsInited = true;
    }
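    /// <summary>
    /// Starts the estimation loop on its worker thread. Init must have been called first.
    /// </summary>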
    public void StartEstimation()
    {
        if (!IsInited)
        {
            Debug.LogError("CVEstimator is not inited.");
            return;
        }
        if (IsRunning)
        {
            return;
        }
        // Mark the loop as running before starting the worker thread,
        // otherwise the estimation loop could exit immediately.
        IsRunning = true;
        _estimateThread.Start();
    }
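    /// <summary>
    /// Stops the estimation loop and aborts both worker threads.
    /// </summary>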
    public void Dispose()
    {
        if (!IsInited)
        {
            return;
        }
        IsRunning = false;
        _estimateThread?.Abort();
        _mainThread?.Abort();
        IsInited = false;
    }
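    // Entry point of the main worker thread: enables OpenCV debug logging and
    // prepares the estimation thread, which StartEstimation launches later.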
    private void MainThread()
    {
        // Load the model.
        Utils.setDebugMode(true); // print OpenCV debug logs
        // Parse the data and prepare the estimation thread.
        _estimateThread = new Thread(new ThreadStart(Estimation));
    }
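    // Estimation loop: converts the current camera frame to BGR, extracts the person ROI,
    // runs pose estimation, and publishes the keypoints at roughly 30 updates per second.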
    private void Estimation()
    {
        DateTime startTime = DateTime.Now;
        Mat bgrMat = new Mat();

        while (IsRunning)
        {
            Mat rgbaMat = MotionCaptureManager.Instance.RgbaMat;
            if (rgbaMat == null)
            {
                Debug.Log("WebCamTexture frame is not ready yet, re-estimating.");
                Thread.Sleep(_refreshRate); // avoid busy-waiting while no frame is available
                continue; // retry on the next frame
            }
            Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);
            if (!YogaManager.Instance.CurrentEstimator.TryGetROIRegionMat(bgrMat, rgbaMat, out Mat result))
            {
                continue; // no ROI found, retry on the next frame
            }
            if (!YogaManager.Instance.CurrentEstimator.Esitmate(bgrMat, rgbaMat, result, out List<Point> points))
            {
                Debug.LogWarning("Pose estimation failed. Re-estimating.");
                continue; // retry on the next frame
            }

            // Publish the detected keypoints.
            MotionCaptureManager.Instance.CurrPersonPoints = points;

            // Wait for the next update cycle so the loop runs at roughly the target rate.
            TimeSpan elapsed = DateTime.Now - startTime;
            if (elapsed < _refreshRate)
            {
                Thread.Sleep(_refreshRate - elapsed);
            }
            startTime = DateTime.Now;
        }

        bgrMat.Dispose();
    }
}
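// Minimal usage sketch (illustrative only; assumes the Singleton<T> base class
// exposes an Instance accessor, which is not shown in this file):
//
//   CVEstimator.Instance.Init();
//   CVEstimator.Instance.StartEstimation();
//   // ... later, when capture ends ...
//   CVEstimator.Instance.Dispose();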