using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnityExample.DnnModel;
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Experimental.GlobalIllumination;
using static UnityEngine.Networking.UnityWebRequest;
using OpenCVRange = OpenCVForUnity.CoreModule.Range;
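/// <summary>
/// MediaPipe-backed implementation of <see cref="Estimator"/>: detects people with
/// MediaPipePersonDetector, estimates pose landmarks with MediaPipePoseEstimator,
/// and remaps the landmarks to the OpenPose-style keypoint order used by YogaManager.
/// </summary>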
public class MediaPipeEstimator : Estimator
{
    private MediaPipePersonDetector _personDetector;
    private MediaPipePoseEstimator _poseEstimator;
    private bool _mask = false;
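    /// <summary>
    /// Loads the MediaPipe person-detection and pose-estimation model files listed in
    /// YogaConfig.MODEL_PATHS. Logs an error (and leaves the corresponding field null)
    /// when a model file cannot be found.
    /// </summary>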
    public override void InitModel()
    {
        var personDetectModelPath = Utils.getFilePath(YogaConfig.MODEL_PATHS[ModelType.MediapipePersonDetect]);
        if (string.IsNullOrEmpty(personDetectModelPath))
        {
            Debug.LogError(YogaConfig.MODEL_PATHS[ModelType.MediapipePersonDetect] + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
        }
        else
        {
            _personDetector = new MediaPipePersonDetector(personDetectModelPath, 0.3f, 0.6f, 5000);
        }

        var poseModelPath = Utils.getFilePath(YogaConfig.MODEL_PATHS[ModelType.MediapipePose]);
        if (string.IsNullOrEmpty(poseModelPath))
        {
            Debug.LogError(YogaConfig.MODEL_PATHS[ModelType.MediapipePose] + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
        }
        else
        {
            _poseEstimator = new MediaPipePoseEstimator(poseModelPath, 0.9f);
        }
    }
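    /// <summary>
    /// Draws a warning onto the frame when either model failed to load, pointing the
    /// user at the console message produced by <see cref="InitModel"/>.
    /// </summary>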
    public override void Check(ref Mat img)
    {
        if (_personDetector == null || _poseEstimator == null)
        {
            Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
            Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        }
    }
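    /// <summary>
    /// Runs the person detector on the BGR frame, then the pose estimator on each detected
    /// person. The resulting landmarks are remapped to OpenPose order and checked against the
    /// current action via YogaManager.Instance.ActionCheckPoints; returns true on the first match.
    /// </summary>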
    public override bool Esitmate(Mat bgrMat, Mat rgbaMat, out List<Point> points)
    {
        points = null;
        Mat result = _personDetector.infer(bgrMat);

        if (result.rows() == 0)
            return false;

        for (int i = 0; i < result.rows(); ++i)
        {
            List<Mat> results = _poseEstimator.infer(bgrMat, result.row(i));
            //_poseEstimator.visualize(rgbaMat, results[0], false, false);
            points = GetKeypoints(results[0]);
            if (points != null && YogaManager.Instance.ActionCheckPoints(points))
            {
                return true;
            }
        }
        // No person detected (or no detected pose matched the current action).
        return false;
    }
    public override void DebugPrint(Mat img, bool isRGB = false)
    {
        // Not using YOLO here; switching the debug print mode is TODO.
    }
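    /// <summary>
    /// Converts one pose-estimator result Mat (317 rows: 0-3 bbox, 4-198 screen landmarks as 39x5,
    /// 199-315 world landmarks as 39x3, 316 confidence) into the OpenPose-style keypoint list.
    /// The 6 auxiliary landmarks are dropped and points with presence below 0.8 become null.
    /// </summary>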
    private List<Point> GetKeypoints(Mat result)
    {
        var retVal = new List<Point>();
        if (result.empty() || result.rows() < 317)
            return retVal;

        // Extract the keypoint data from the pose-estimator output.
        float[] conf = new float[1];
        result.get(316, 0, conf);
        float[] bbox = new float[4];
        result.get(0, 0, bbox);

        int auxiliary_points_num = 6;
        Mat results_col4_199_39x5 = result.rowRange(new OpenCVRange(4, 199 - (5 * auxiliary_points_num))).reshape(1, 39 - auxiliary_points_num);
        float[] landmarks_screen_xy = new float[(39 - auxiliary_points_num) * 2];
        results_col4_199_39x5.colRange(new OpenCVRange(0, 2)).get(0, 0, landmarks_screen_xy);
        float[] landmarks_screen_xyz = new float[(39 - auxiliary_points_num) * 3];
        results_col4_199_39x5.colRange(new OpenCVRange(0, 3)).get(0, 0, landmarks_screen_xyz);
        // Only keep keypoints whose presence score is greater than 0.8.
        float[] landmarks_presence = new float[(39 - auxiliary_points_num)];
        results_col4_199_39x5.colRange(new OpenCVRange(4, 5)).get(0, 0, landmarks_presence);
        Mat results_col199_316_39x3 = result.rowRange(new OpenCVRange(199, 316 - (3 * auxiliary_points_num))).reshape(1, 39 - auxiliary_points_num);
        float[] landmarks_world = new float[(39 - auxiliary_points_num) * 3];
        results_col199_316_39x3.get(0, 0, landmarks_world);

        // Map the MediaPipe landmarks onto the existing OpenPose keypoint order.
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 0));  //Nose
        retVal.Add(new Point(-1, -1));                                         //Neck
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 11)); //LShoulder
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 13)); //LElbow
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 15)); //LWrist
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 12)); //RShoulder
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 14)); //RElbow
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 16)); //RWrist
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 23)); //LHip
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 25)); //LKnee
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 27)); //LAnkle
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 24)); //RHip
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 26)); //RKnee
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 28)); //RAnkle
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 2));  //LEye
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 5));  //REye
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 7));  //LEar
        retVal.Add(GetPointData(landmarks_screen_xy, landmarks_presence, 8));  //REar
        retVal.Add(new Point(-1, -1));                                         //Background
        return retVal;
    }
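    /// <summary>
    /// Returns the screen-space point for the given landmark index, or null when its
    /// presence score is below 0.8.
    /// </summary>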
    private Point GetPointData(float[] landmarks, float[] _landmarks_presence, int index)
    {
        if (_landmarks_presence[index] < 0.8f)
            return null;
        index = index * 2;
        return new Point(landmarks[index], landmarks[index + 1]);
    }
    public override void DisposeModel()
    {
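        // A hedged cleanup sketch (not in the original code): it assumes the
        // OpenCVForUnityExample model wrappers expose a dispose() method that
        // releases their native OpenCV resources. If they do, calling it here
        // avoids leaking native memory when the estimator is torn down.
        _personDetector?.dispose();
        _personDetector = null;
        _poseEstimator?.dispose();
        _poseEstimator = null;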
}
}