using OpenCVForUnity.CoreModule;
using OpenCVForUnity.DnnModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using System;
using System.Collections.Generic;
using System.Threading;
using UnityEngine;
using Yoga;

/// <summary>
/// Background-threaded pose estimator: grabs the current camera frame from
/// MotionCaptureManager, finds the person ROI, runs keypoint estimation via
/// the active estimator in YogaManager, and publishes both results back to
/// MotionCaptureManager, throttled to ~30 updates per second.
/// </summary>
public class CVEstimator : Singleton
{
    private bool isInited = false;
    public bool IsInited { get => isInited; private set => isInited = value; }
    public bool IsRunning { get; private set; }

    private Thread _mainThread;
    private Thread _estimateThread;

    // BUG FIX: "1 / 30" is integer division and evaluates to 0, so the
    // refresh interval was zero and the loop never throttled. Use a
    // floating-point quotient to get a real ~33 ms period.
    private static readonly TimeSpan _refreshRate = TimeSpan.FromSeconds(1.0 / 30.0);

    //public int inpWidth = 416;
    //public int inpHeight = 416;
    //public float confThreshold = 0.35f;
    //public float nmsThreshold = 0.6f;
    //public int topK = 1000;
    //private double threshold = 0.5;

    private Net _net;
    private KeypointsModel _openPoseModel;

    /// <summary>
    /// Initializes the estimator and starts the worker thread that loads the
    /// model and prepares the estimation thread. No-op if already inited.
    /// </summary>
    /// <param name="args">Currently unused; kept for interface compatibility.</param>
    public void Init(params object[] args)
    {
        if (IsInited)
        {
            return;
        }

        if (_mainThread != null)
        {
            // NOTE(review): Thread.Abort is obsolete and unreliable on modern
            // runtimes; a cooperative shutdown flag would be safer — TODO
            // confirm the Unity/Mono runtime in use still honors Abort.
            _mainThread.Abort();
            _mainThread = null;
        }

        // First detect the person with YOLOv7, then estimate pose with OpenPose.
        //if (args.Length < 1)
        //    throw new ArgumentException("CVEstimator.Init: args.Length < 2");
        //_objectDetector = args[0] as YOLOv7ObjectDetector;
        //if (_objectDetector == null)
        //    throw new ArgumentException("CVEstimator.Init: args[0] is not YOLOv7ObjectDetector");
        //_openPoseModel = args[1] as KeypointsModel;
        //if (_openPoseModel == null)
        //    throw new ArgumentException("CVEstimator.Init: args[1] is not KeypointsModel");

        _mainThread = new Thread(new ThreadStart(MainThread));
        _mainThread.Start();
        IsInited = true;
    }

    /// <summary>
    /// Starts the background estimation loop. No-op when not inited, when
    /// already running, or when the estimation thread is not yet prepared.
    /// </summary>
    public void StartEstimation()
    {
        if (!IsInited)
        {
            Debug.LogError("CVEstimator is not inited. ");
            return;
        }
        // BUG FIX: the original called _estimateThread.Start() before checking
        // or setting IsRunning, so (a) a second call threw
        // ThreadStateException, (b) the loop could observe IsRunning == false
        // and exit immediately, and (c) _estimateThread could still be null
        // because MainThread creates it asynchronously.
        if (IsRunning)
        {
            return;
        }
        if (_estimateThread == null)
        {
            Debug.LogError("Estimation thread is not ready. ");
            return;
        }
        IsRunning = true;
        _estimateThread.Start();
    }

    /// <summary>
    /// Stops the estimation loop and tears down both worker threads.
    /// No-op if not inited.
    /// </summary>
    public void Dispose()
    {
        if (!IsInited)
        {
            return;
        }
        IsRunning = false;
        // BUG FIX: _estimateThread is created asynchronously on the main
        // worker thread and may be null here — guard both aborts.
        _estimateThread?.Abort();
        _estimateThread = null;
        _mainThread?.Abort();
        _mainThread = null;
        IsInited = false;
    }

    /// <summary>
    /// Worker entry point: enables OpenCV debug logging (model loading) and
    /// prepares — but does not start — the estimation thread.
    /// </summary>
    private void MainThread()
    {
        // Load model.
        Utils.setDebugMode(true); // enable OpenCV-for-Unity debug logging
        // Parse data.
        _estimateThread = new Thread(new ThreadStart(Estimation));
    }

    /// <summary>
    /// Estimation loop body: converts the RGBA camera frame to BGR, detects
    /// the person bounding box, runs pose estimation, publishes the results,
    /// and sleeps out the remainder of each ~33 ms refresh period.
    /// </summary>
    private void Estimation()
    {
        DateTime startTime = DateTime.Now;
        Mat bgrMat = new Mat();
        while (IsRunning)
        {
            Mat rgbaMat = MotionCaptureManager.Instance.RgbaMat;
            if (rgbaMat == null)
            {
                Debug.Log("WebCamTexture is null. ");
                Debug.Log("Re-Estimation.");
                // BUG FIX: yield for one refresh period instead of
                // busy-spinning while the camera texture is unavailable.
                Thread.Sleep(_refreshRate);
                continue; // retry detection
            }
            Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);
            // NOTE(review): the bare "List" out-parameter types look like the
            // generic type arguments were stripped from this file (likely
            // List<Mat>/List<Point>) — confirm against the estimator API.
            if (!YogaManager.Instance.CurrentEstimator.TryGetROIRegionMat(rgbaMat, bgrMat, out List voloResultBox))
            {
                continue; // retry detection
            }
            // Publish the person-box detection result.
            MotionCaptureManager.Instance.VoloResult = voloResultBox;
            if (!YogaManager.Instance.CurrentEstimator.Esitmate(bgrMat, voloResultBox, out List points))
            {
                Debug.LogWarning("Pose estimation failed. Re-Estimating.");
                continue; // retry detection
            }
            // Publish the keypoint detection result.
            MotionCaptureManager.Instance.CurrPersonPoints = points;
            // Wait until the next refresh period.
            // BUG FIX: the original never reset startTime after sleeping, so
            // the throttle drifted; always rebase the period after the wait.
            TimeSpan elapsed = DateTime.Now - startTime;
            if (elapsed < _refreshRate)
            {
                Thread.Sleep(_refreshRate - elapsed);
            }
            startTime = DateTime.Now;
        }
        bgrMat.Dispose();
    }
}