Merge branch 'dev/demo' of http://47.92.207.105:3000/terric/Health into dev/demo

This commit is contained in:
terric 2023-11-24 22:17:53 +08:00
commit 897469e3a9
100 changed files with 5014 additions and 231 deletions

View File

@ -43,8 +43,8 @@ public class LLM:MonoBehaviour
//提示词处理
string message =
m_Prompt +
" 回答的语言:" + lan +
" 接下来是我的提问:" + _msg;
" The language of reply is " + lan +
" here's my question:" + _msg;
UnityEngine.Debug.Log("玩家post" + message);
//缓存发送的信息列表

View File

@ -7,7 +7,10 @@ using UnityEngine.Networking;
public class AzureTextToSpeech : TTS
{
public AudioClip saySorry;
public AudioClip lookBad;
public AudioClip[] sayHello;
public AudioClip lookHappy;
public AudioClip lookSurprise;
#region
/// <summary>
/// Azure配置项
@ -55,10 +58,22 @@ public class AzureTextToSpeech : TTS
{
callback(sayHello[UnityEngine.Random.Range(0, sayHello.Length)]);
}
else
else if(presetAudio == PresetAudio.SorrySaid)
{
callback(saySorry);
}
else if(presetAudio == PresetAudio.LookBad)
{
callback(lookBad);
}
else if (presetAudio == PresetAudio.LookHappy)
{
callback(lookHappy);
}
else if (presetAudio == PresetAudio.LookSurprised)
{
callback(lookSurprise);
}
}

View File

@ -9,7 +9,10 @@ public class TTS : MonoBehaviour
public enum PresetAudio
{
SayHi,
SorrySaid
SorrySaid,
LookBad,
LookSurprised,
LookHappy
}
/// <summary>
/// 语音合成的api地址

View File

@ -0,0 +1,26 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace RichFrame
{
/// <summary>
/// 单例
/// </summary>
/// <typeparam name="T"></typeparam>
/// <summary>
/// Generic scene singleton: exposes the first component of type
/// <typeparamref name="T"/> found in the scene via a lazily-resolved
/// static accessor.
/// </summary>
/// <typeparam name="T">Concrete MonoBehaviour type of the singleton.</typeparam>
public class Singleton<T> : MonoBehaviour where T : MonoBehaviour
{
    // Cached instance; re-resolved on demand. Unity's overloaded == also
    // treats a destroyed component as null, so a stale cache is re-looked-up.
    static T _instance;

    public static T Instance
    {
        get
        {
            if (_instance == null)
            {
                _instance = FindObjectOfType<T>();
            }
            return _instance;
        }
    }
}
}

View File

@ -120,6 +120,175 @@ NavMeshSettings:
debug:
m_Flags: 0
m_NavMeshData: {fileID: 0}
--- !u!1 &42567472
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 42567473}
- component: {fileID: 42567479}
- component: {fileID: 42567477}
- component: {fileID: 42567476}
- component: {fileID: 42567480}
m_Layer: 0
m_Name: OpenCV
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &42567473
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 42567472}
serializedVersion: 2
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.1619103, y: 1.36112, z: 2.4244072}
m_LocalScale: {x: 640, y: 480, z: 1}
m_ConstrainProportionsScale: 0
m_Children: []
m_Father: {fileID: 725449907}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &42567476
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 42567472}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: df35b0c19ca97734e87299a664cea35f, type: 3}
m_Name:
m_EditorClassIdentifier:
_requestedDeviceName:
_requestedWidth: 640
_requestedHeight: 480
_requestedIsFrontFacing: 0
_requestedFPS: 30
_rotate90Degree: 0
_flipVertical: 0
_flipHorizontal: 0
_outputColorFormat: 3
_timeoutFrameCount: 1500
onInitialized:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 42567480}
m_TargetAssemblyTypeName: OpenCVForUnityExample.MFacialExpressionRecognition,
Assembly-CSharp
m_MethodName: OnWebCamTextureToMatHelperInitialized
m_Mode: 1
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument:
m_BoolArgument: 0
m_CallState: 2
onDisposed:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 42567480}
m_TargetAssemblyTypeName: OpenCVForUnityExample.MFacialExpressionRecognition,
Assembly-CSharp
m_MethodName: OnWebCamTextureToMatHelperDisposed
m_Mode: 1
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument:
m_BoolArgument: 0
m_CallState: 2
onErrorOccurred:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 42567480}
m_TargetAssemblyTypeName: OpenCVForUnityExample.MFacialExpressionRecognition,
Assembly-CSharp
m_MethodName: OnWebCamTextureToMatHelperErrorOccurred
m_Mode: 0
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument:
m_BoolArgument: 0
m_CallState: 2
avoidAndroidFrontCameraLowLightIssue: 0
--- !u!23 &42567477
MeshRenderer:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 42567472}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_StaticShadowCaster: 0
m_MotionVectors: 1
m_LightProbeUsage: 0
m_ReflectionProbeUsage: 1
m_RayTracingMode: 2
m_RayTraceProcedural: 0
m_RenderingLayerMask: 1
m_RendererPriority: 0
m_Materials:
- {fileID: 2100000, guid: d5c2a09c8f8d079458801d608271b579, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_ReceiveGI: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
m_AdditionalVertexStreams: {fileID: 0}
--- !u!33 &42567479
MeshFilter:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 42567472}
m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0}
--- !u!114 &42567480
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 42567472}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 956d8fd069f661f4aa9eb1055d305572, type: 3}
m_Name:
m_EditorClassIdentifier:
testInputImage:
--- !u!1 &69452801
GameObject:
m_ObjectHideFlags: 0
@ -430,7 +599,7 @@ Transform:
m_Children:
- {fileID: 1370441789}
- {fileID: 1864357025}
m_Father: {fileID: 0}
m_Father: {fileID: 1867627730}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &178736804
GameObject:
@ -904,6 +1073,7 @@ GameObject:
m_Component:
- component: {fileID: 334146834}
- component: {fileID: 334146835}
- component: {fileID: 334146836}
m_Layer: 0
m_Name: ChatBox
m_TagString: Untagged
@ -919,9 +1089,9 @@ Transform:
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 334146833}
serializedVersion: 2
m_LocalRotation: {x: -0, y: 0.20791169, z: -0, w: 0.9781476}
m_LocalPosition: {x: -1.2509742, y: 1.49, z: 0.16220574}
m_LocalScale: {x: 0.59341, y: 0.59341, z: 0.59341}
m_LocalRotation: {x: 7.912123e-10, y: 0.2079117, z: 0.0000000037223615, w: 0.9781476}
m_LocalPosition: {x: -1.2509742, y: 1.4899998, z: 0.16220571}
m_LocalScale: {x: 0.5934101, y: 0.59340996, z: 0.59340996}
m_ConstrainProportionsScale: 0
m_Children:
- {fileID: 1260374842}
@ -979,6 +1149,19 @@ SpriteRenderer:
m_WasSpriteAssigned: 1
m_MaskInteraction: 0
m_SpriteSortPoint: 0
--- !u!114 &334146836
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 334146833}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: d7a321df2a6cc0143875c6264f77afea, type: 3}
m_Name:
m_EditorClassIdentifier:
textMeshPro: {fileID: 1260374843}
--- !u!95 &343958444 stripped
Animator:
m_CorrespondingSourceObject: {fileID: 5866666021909216657, guid: 659354b3e275286478812f96994daf50, type: 3}
@ -1385,7 +1568,7 @@ AudioSource:
Spatialize: 0
SpatializePostEffects: 0
Priority: 128
DopplerLevel: 1
DopplerLevel: 0
MinDistance: 1
MaxDistance: 500
Pan2D: 0
@ -1437,7 +1620,7 @@ AudioSource:
m_Curve:
- serializedVersion: 3
time: 0
value: 0
value: 1
inSlope: 0
outSlope: 0
tangentMode: 0
@ -1470,12 +1653,12 @@ Transform:
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 489438414}
serializedVersion: 2
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_ConstrainProportionsScale: 0
m_Children: []
m_Father: {fileID: 0}
m_Father: {fileID: 994815019}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &520604338
GameObject:
@ -1659,7 +1842,7 @@ GameObject:
- component: {fileID: 571998187}
m_Layer: 0
m_Name: Camera
m_TagString: Untagged
m_TagString: MainCamera
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
@ -1860,6 +2043,51 @@ CanvasRenderer:
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 606057521}
m_CullTransparentMesh: 1
--- !u!1 &606618096
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 606618097}
- component: {fileID: 606618098}
m_Layer: 0
m_Name: '[TakeCare]'
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &606618097
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 606618096}
serializedVersion: 2
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_ConstrainProportionsScale: 0
m_Children: []
m_Father: {fileID: 1867627730}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &606618098
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 606618096}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: cc9a370b3125d7145a714b241092c418, type: 3}
m_Name:
m_EditorClassIdentifier:
expressionRecognition: {fileID: 725449908}
--- !u!1001 &617804397
PrefabInstance:
m_ObjectHideFlags: 0
@ -2842,6 +3070,55 @@ PrefabInstance:
m_AddedGameObjects: []
m_AddedComponents: []
m_SourcePrefab: {fileID: 100100000, guid: 80b2fb52097f8c2409921cb59ed6c40b, type: 3}
--- !u!1 &725449906
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 725449907}
- component: {fileID: 725449908}
m_Layer: 0
m_Name: '[FaceExpression]'
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &725449907
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 725449906}
serializedVersion: 2
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_ConstrainProportionsScale: 0
m_Children:
- {fileID: 1020181388}
- {fileID: 42567473}
m_Father: {fileID: 1867627730}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &725449908
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 725449906}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 960242e255f62c34e943f3a352559541, type: 3}
m_Name:
m_EditorClassIdentifier:
triggerEventDuration: 2
resetExpDurationInterval: 5
triggerEventCD: 5
--- !u!1 &757868372
GameObject:
m_ObjectHideFlags: 0
@ -2866,14 +3143,14 @@ Transform:
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 757868372}
serializedVersion: 2
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: -0.6697998, y: 1.4160436, z: -0.05155917}
m_LocalScale: {x: 1, y: 1, z: 1}
m_ConstrainProportionsScale: 0
m_Children:
- {fileID: 2023088797}
- {fileID: 401420926}
m_Father: {fileID: 0}
m_Father: {fileID: 1867627730}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &761275341
GameObject:
@ -3350,12 +3627,12 @@ Transform:
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 830768046}
serializedVersion: 2
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 113.66112, y: 36.962967, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_ConstrainProportionsScale: 0
m_Children: []
m_Father: {fileID: 0}
m_Father: {fileID: 1867627730}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &834105487
GameObject:
@ -4097,7 +4374,7 @@ MonoBehaviour:
voiceInputs: {fileID: 830768047}
voiceWakeUp: {fileID: 1674986169}
chatHistory: []
textMeshPro: {fileID: 1260374843}
chatBox: {fileID: 334146836}
currentTalking:
--- !u!4 &994815019
Transform:
@ -4112,10 +4389,169 @@ Transform:
m_LocalScale: {x: 1, y: 1, z: 1}
m_ConstrainProportionsScale: 0
m_Children:
- {fileID: 489438416}
- {fileID: 334146834}
- {fileID: 343958445}
m_Father: {fileID: 0}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &1020181387
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 1020181388}
- component: {fileID: 1020181393}
- component: {fileID: 1020181392}
- component: {fileID: 1020181391}
- component: {fileID: 1020181390}
- component: {fileID: 1020181389}
m_Layer: 0
m_Name: OpenCV-UVC
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 0
--- !u!4 &1020181388
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1020181387}
serializedVersion: 2
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.1619103, y: 1.36112, z: 2.4244072}
m_LocalScale: {x: 1, y: 1, z: 1}
m_ConstrainProportionsScale: 0
m_Children: []
m_Father: {fileID: 725449907}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &1020181389
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1020181387}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: a07b9addfb2161e4b8fbdc35b3ed9531, type: 3}
m_Name:
m_EditorClassIdentifier:
testInputImage:
--- !u!114 &1020181390
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1020181387}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 84a7daacad6b09f48b400beb5b4ddb06, type: 3}
m_Name:
m_EditorClassIdentifier:
_requestedDeviceName: 1
_requestedWidth: 640
_requestedHeight: 480
_requestedIsFrontFacing: 1
_requestedFPS: 17
_rotate90Degree: 0
_flipVertical: 0
_flipHorizontal: 0
_outputColorFormat: 1
_timeoutFrameCount: 300
onInitialized:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 1020181389}
m_TargetAssemblyTypeName: OpenCVForUnityExample.FacialExpressionRecognition_UVC,
Assembly-CSharp
m_MethodName: OnWebCamTextureToMatHelperInitialized
m_Mode: 1
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument:
m_BoolArgument: 0
m_CallState: 2
onDisposed:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 1020181389}
m_TargetAssemblyTypeName: OpenCVForUnityExample.FacialExpressionRecognition_UVC,
Assembly-CSharp
m_MethodName: OnWebCamTextureToMatHelperDisposed
m_Mode: 1
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument:
m_BoolArgument: 0
m_CallState: 2
onErrorOccurred:
m_PersistentCalls:
m_Calls: []
uvcTexture: {fileID: 0}
avoidAndroidFrontCameraLowLightIssue: 0
--- !u!114 &1020181391
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1020181387}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: f7e3fa0f7bdc68444abad301469f7cf7, type: 3}
m_Name:
m_EditorClassIdentifier:
DefaultWidth: 1280
DefaultHeight: 720
PreferH264: 0
RenderBeforeSceneRendering: 0
UVCDrawers:
- {fileID: 1020181393}
--- !u!114 &1020181392
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1020181387}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 6eca0239a3e830b45b761684e2a8c8ca, type: 3}
m_Name:
m_EditorClassIdentifier:
--- !u!114 &1020181393
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1020181387}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: b48c16e283709764bb9d934a3f73886c, type: 3}
m_Name:
m_EditorClassIdentifier:
sizeScale: 0.5
isFrontFacing: 0
UVCFilters:
- Description:
Vid: 0
Pid: 0
DeviceName: /dev/bus/usb/002/008
IsExclude: 0
--- !u!1 &1174160297
GameObject:
m_ObjectHideFlags: 0
@ -4327,7 +4763,7 @@ MonoBehaviour:
m_OnCullStateChanged:
m_PersistentCalls:
m_Calls: []
m_text: Thinking...
m_text:
m_isRightToLeft: 0
m_fontAsset: {fileID: 11400000, guid: 8f586378b4e144a9851e7b34d9b748ee, type: 2}
m_sharedMaterial: {fileID: 2180264, guid: 8f586378b4e144a9851e7b34d9b748ee, type: 2}
@ -4354,8 +4790,8 @@ MonoBehaviour:
m_faceColor:
serializedVersion: 2
rgba: 4294967295
m_fontSize: 1.86
m_fontSizeBase: 1.86
m_fontSize: 1.4
m_fontSizeBase: 1.4
m_fontWeight: 400
m_enableAutoSizing: 0
m_fontSizeMin: 18
@ -4835,12 +5271,12 @@ Transform:
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1674986168}
serializedVersion: 2
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.89272064, y: 0.28622746, z: -1.1412808}
m_LocalScale: {x: 1, y: 1, z: 1}
m_ConstrainProportionsScale: 0
m_Children: []
m_Father: {fileID: 0}
m_Father: {fileID: 1867627730}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!4 &1693404508 stripped
Transform:
@ -5560,7 +5996,8 @@ MonoBehaviour:
m_Name:
m_EditorClassIdentifier:
url: https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant
m_Prompt: "\u4F60\u626E\u6F14\u540D\u53EBcare bot\u7684\u5927\u4F17\u6C7D\u8F66\u673A\u5668\u4EBA\u52A9\u624B\u548C\u6211\u5BF9\u8BDD\uFF0C100\u5B57\u4EE5\u5185\uFF0C\u4E0D\u8981\u4F7F\u7528\u8868\u60C5"
m_Prompt: You play the role of a Volkswagen robot assistant named "care bot" and
talk to me in 50 words or less, keep it short and don't use emojis.
lan: english
m_HistoryKeepCount: 15
m_DataList: []
@ -5583,6 +6020,43 @@ MonoBehaviour:
m_GetTokenFromServer: 0
m_Token: 24.a5b8cc45726132275717268035aa4f9f.2592000.1701917245.282335-42480460
m_AuthorizeURL: https://aip.baidubce.com/oauth/2.0/token
--- !u!1 &1867627729
GameObject:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
serializedVersion: 6
m_Component:
- component: {fileID: 1867627730}
m_Layer: 0
m_Name: '[Scripts]'
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &1867627730
Transform:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 1867627729}
serializedVersion: 2
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_ConstrainProportionsScale: 0
m_Children:
- {fileID: 830768048}
- {fileID: 1674986170}
- {fileID: 757868373}
- {fileID: 178558178}
- {fileID: 725449907}
- {fileID: 606618097}
m_Father: {fileID: 0}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &1871013865
GameObject:
m_ObjectHideFlags: 0
@ -5842,9 +6316,12 @@ MonoBehaviour:
m_EditorClassIdentifier:
m_PostURL:
saySorry: {fileID: 8300000, guid: c86b1d89cfa640f479dfb144ae41de2c, type: 3}
lookBad: {fileID: 8300000, guid: 1a832d6973b62754e8b3ed14465a278b, type: 3}
sayHello:
- {fileID: 8300000, guid: 1c56f34f88846f44fbcec4823676fdd4, type: 3}
- {fileID: 8300000, guid: 38a3652b1638ddb47859c87bc97e0fc8, type: 3}
lookHappy: {fileID: 0}
lookSurprise: {fileID: 8300000, guid: 3c6e71f6457937b469d6f6fae991700c, type: 3}
m_AzureSettings: {fileID: 2023088800}
voiceName: en-US-GuyNeural
style: chat
@ -6274,13 +6751,9 @@ SceneRoots:
- {fileID: 571998185}
- {fileID: 761275345}
- {fileID: 549504102}
- {fileID: 178558178}
- {fileID: 757868373}
- {fileID: 854293664}
- {fileID: 144129215}
- {fileID: 489438416}
- {fileID: 1674986170}
- {fileID: 830768048}
- {fileID: 1181482635}
- {fileID: 657372508}
- {fileID: 994815019}
- {fileID: 1181482635}
- {fileID: 1867627730}

View File

@ -25,7 +25,7 @@ public class CarAssistant : MonoBehaviour
public VoiceWakeUp voiceWakeUp;
//保存聊天记录
public List<string> chatHistory;
public TextMeshPro textMeshPro;
public ChatBox chatBox;
public string currentTalking = "";
/// <summary>
/// 启用语音唤醒
@ -50,10 +50,13 @@ public class CarAssistant : MonoBehaviour
}
}
bool _EnableVoiceWakeup = false;
[NonSerialized]
public Expression expression = Expression.Neutral;
public event Action<string> onReceiveText;
private void Awake()
{
audioSource.dopplerLevel = 0;
statusManager = new StatusManager(this);
}
@ -81,6 +84,7 @@ public class CarAssistant : MonoBehaviour
{
if (successed)
{
PlayPresetAudioClip(TTS.PresetAudio.SayHi);
statusManager.MakeTransition(statusManager.listening);
}
}
@ -108,8 +112,17 @@ public class CarAssistant : MonoBehaviour
public void PlayAudioClip(AudioClip clip)
{
audioSource.clip = clip;
audioSource.Play();
audioSource.PlayOneShot(clip);
}
/// <summary>
/// 播放预设的音频
/// </summary>
/// <param name="presetAudio"></param>
public void PlayPresetAudioClip(TTS.PresetAudio presetAudio)
{
chatSettings.m_TextToSpeech.Speak(presetAudio, (clip) => {
audioSource.PlayOneShot(clip);
});
}
/// <summary>
@ -177,4 +190,14 @@ public class CarAssistant : MonoBehaviour
statusManager.MakeTransition(statusManager.talking);
});
}
public enum Expression
{
Neutral = 0,
Happy = 1,
Sad = 2,
Doubt = 3,
Suprised = 4,
Smile = 5
}
}

View File

@ -9,7 +9,7 @@ public class Status_Idle : AssistantStatus
base.EnterState();
assistant.animController.Idle();
assistant.EnableVoiceWakeup = true;
assistant.textMeshPro.text = "Idle...";
assistant.chatBox.SetText("Idle...", assistant.expression);
}
public override void QuitState()
{

View File

@ -11,12 +11,9 @@ public class Status_Listening : AssistantStatus
{
base.EnterState();
assistant.animController.SayHi();
assistant.chatSettings.m_TextToSpeech.Speak(TTS.PresetAudio.SayHi,(clip)=> {
assistant.PlayAudioClip(clip);
});
assistant.voiceInputs.StartRecordAudio();
assistant.EnableVoiceWakeup = false;
assistant.textMeshPro.text = "Listening...";
assistant.chatBox.SetText("Listening...", assistant.expression);
}
public override void Update()
{

View File

@ -10,7 +10,7 @@ public class Status_Talking : AssistantStatus
assistant.animController.StartSpeek();
PlayVoice(assistant.clip, assistant.currentTalking);
assistant.EnableVoiceWakeup = true;
assistant.textMeshPro.text = "Talking...";
assistant.chatBox.SetText("Talking...", assistant.expression);
}
public override void QuitState()

View File

@ -10,7 +10,7 @@ public class Status_Thinking : AssistantStatus
base.EnterState();
assistant.animController.Thinking();
assistant.EnableVoiceWakeup = false;
assistant.textMeshPro.text = "Thinking...";
assistant.chatBox.SetText("Thinking...",assistant.expression);
}
public override void Update()

View File

@ -0,0 +1,54 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Bridges facial-expression recognition to the car assistant: when the
/// recognizer reports a sustained expression, plays a preset reaction
/// (audio / dance animation), updates the assistant's expression state,
/// and schedules a transition back to the listening status.
/// </summary>
public class TakeCare : MonoBehaviour
{
    /// <summary>Source of recognized-expression events (assigned in the inspector).</summary>
    public ExpressionRecognition expressionRecognition;
    CarAssistant carAssistant;

    private void Awake()
    {
        carAssistant = FindObjectOfType<CarAssistant>();
        if (carAssistant == null)
        {
            // Fail loudly here rather than with a NullReferenceException on the first event.
            Debug.LogError("TakeCare: no CarAssistant found in the scene.");
        }
        expressionRecognition.onRecognizingExpression += ProcessExpression;
    }

    private void OnDestroy()
    {
        // Fix: the original never unsubscribed, leaving the recognizer holding
        // a delegate to a destroyed component.
        if (expressionRecognition != null)
        {
            expressionRecognition.onRecognizingExpression -= ProcessExpression;
        }
    }

    /// <summary>
    /// Reacts to a recognized expression. Neutral and Fearful are ignored;
    /// Happy triggers a dance easter egg; negative expressions and Suprised
    /// play a comfort/greeting clip, then return the assistant to listening
    /// after a short delay.
    /// </summary>
    /// <param name="expression">Expression reported by the recognizer.</param>
    public void ProcessExpression(FaceInfo.Expression expression)
    {
        Debug.Log("处理表情:" + expression);
        switch (expression)
        {
            case FaceInfo.Expression.Neutral:
                break;
            case FaceInfo.Expression.Happy:
                // Easter egg: dance animation and a happy chat-box face.
                carAssistant.animController.Dance();
                carAssistant.expression = CarAssistant.Expression.Happy;
                carAssistant.chatBox.SetText("", CarAssistant.Expression.Happy);
                break;
            case FaceInfo.Expression.Sad:
            case FaceInfo.Expression.Disgust:
            case FaceInfo.Expression.Angry:
                // Comfort the user, then go back to listening.
                // Cancel any pending transition before scheduling a new one.
                StopAllCoroutines();
                carAssistant.PlayPresetAudioClip(TTS.PresetAudio.LookBad);
                carAssistant.expression = CarAssistant.Expression.Smile;
                StartCoroutine(TransitionToStatusDelay(carAssistant.statusManager.listening, 2f));
                break;
            case FaceInfo.Expression.Suprised:
                // Greet the user, then go back to listening.
                StopAllCoroutines();
                carAssistant.PlayPresetAudioClip(TTS.PresetAudio.LookSurprised);
                carAssistant.expression = CarAssistant.Expression.Doubt;
                StartCoroutine(TransitionToStatusDelay(carAssistant.statusManager.listening, 2f));
                break;
            case FaceInfo.Expression.Fearful:
                break;
            default:
                break;
        }
    }

    /// <summary>
    /// Waits <paramref name="delay"/> seconds, then transitions the assistant
    /// to <paramref name="status"/>.
    /// </summary>
    IEnumerator TransitionToStatusDelay(AssistantStatus status, float delay)
    {
        yield return new WaitForSeconds(delay);
        carAssistant.statusManager.MakeTransition(status);
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: cc9a370b3125d7145a714b241092c418
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -11,5 +11,6 @@ public abstract class CharacterAnimControlBase : MonoBehaviour
public abstract void Thinking();
public abstract void Idle();
public abstract void Dance();
}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 093673d5dde9d24489180575c5618d29
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,131 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Events;
/// <summary>
/// Tracks how long each facial expression has been continuously observed and
/// raises <see cref="onRecognizingExpression"/> once a non-neutral expression
/// has persisted longer than <see cref="triggerEventDuration"/>, subject to a
/// cooldown of <see cref="triggerEventCD"/> seconds between events.
/// </summary>
public class ExpressionRecognition : MonoBehaviour
{
    /// <summary>
    /// Expression of the dominant detected face (Neutral when no face is seen).
    /// Setting a new value lazily creates its duration-tracking entry.
    /// </summary>
    public FaceInfo.Expression currentExpression
    {
        get
        {
            return _currentExpression;
        }
        private set
        {
            if(_currentExpression != value)
            {
                _currentExpression = value;
                if(!expressionDuration.ContainsKey(value))
                {
                    expressionDuration.Add(value, new ExpressionDuration());
                }
            }
        }
    }
    FaceInfo.Expression _currentExpression;
    // Per-expression bookkeeping: accumulated recognized time and time since last seen.
    // NOTE(review): entries are added from the recognizer callback (via the setter) and
    // iterated in Update(); this is safe only as long as both run on the main thread —
    // confirm the recognizer invokes its listener from Unity's main thread.
    Dictionary<FaceInfo.Expression, ExpressionDuration> expressionDuration = new Dictionary<FaceInfo.Expression, ExpressionDuration>();
    /// <summary>
    /// Once an expression has been detected continuously for this many seconds,
    /// the recognition event fires.
    /// </summary>
    public float triggerEventDuration = 2;
    /// <summary>
    /// If an expression has not been detected for this many seconds, its
    /// accumulated duration is reset to zero.
    /// </summary>
    public float resetExpDurationInterval = 5;
    /// <summary>
    /// Cooldown (seconds) after an event fires before another can fire.
    /// </summary>
    public float triggerEventCD = 20;
    public event UnityAction<FaceInfo.Expression> onRecognizingExpression;
    IFaceRecognizer recognizer;
    // Seconds since the last triggered event; initialized to triggerEventCD in
    // Awake so the very first event is not blocked by the cooldown.
    float triggerCDTimer = 0;
    // Timing record for a single expression.
    class ExpressionDuration
    {
        /// <summary>
        /// Seconds this expression has been recognized. Assigning also clears
        /// unrecognizedInterval, so a freshly-reset entry is not reset again
        /// every frame.
        /// </summary>
        public float duration
        {
            get
            {
                return _duration;
            }
            set
            {
                _duration = value;
                unrecognizedInterval = 0;
            }
        }
        float _duration;
        // Seconds since this expression was last the current one.
        public float unrecognizedInterval;
    }
    private void Awake()
    {
        // NOTE(review): throws NullReferenceException if no IFaceRecognizer child
        // exists — presumably guaranteed by the scene setup; confirm.
        recognizer = GetComponentInChildren<IFaceRecognizer>(false);
        recognizer.BindListener(OnRecognizedFaces);
        triggerCDTimer = triggerEventCD;
    }
    // Start is called before the first frame update (intentionally empty).
    void Start()
    {
    }
    // Update is called once per frame: advance timers and fire the event when
    // the current non-neutral expression has persisted long enough.
    void Update()
    {
        triggerCDTimer += Time.deltaTime;
        foreach (var item in expressionDuration)
        {
            if(item.Key == currentExpression)
            {
                item.Value.duration += Time.deltaTime;
                // When the current expression has persisted beyond the threshold
                // and the cooldown has elapsed, trigger the recognition event.
                if (item.Key != FaceInfo.Expression.Neutral && item.Value.duration > triggerEventDuration && triggerCDTimer > triggerEventCD)
                {
                    Debug.Log("触发表情:" + currentExpression);
                    triggerCDTimer = 0;
                    if (onRecognizingExpression != null)
                        onRecognizingExpression(item.Key);
                }
            }
            else
            {
                // Expression not currently seen: age it, and reset its streak
                // once it has been absent long enough.
                item.Value.unrecognizedInterval += Time.deltaTime;
                if (item.Value.unrecognizedInterval > resetExpDurationInterval)
                {
                    item.Value.duration = 0;
                }
            }
        }
    }
    // Callback from the recognizer: pick the face with the largest bounding-box
    // area and adopt its expression; fall back to Neutral when no faces.
    void OnRecognizedFaces(FaceInfo[] infos)
    {
        if(infos!= null && infos.Length > 0)
        {
            FaceInfo largestFace = infos[0];
            for (int i = 1; i < infos.Length; i++)
            {
                var info = infos[i];
                // Keep the face with the largest area.
                if (info.box.height * info.box.width > largestFace.box.height * largestFace.box.width)
                {
                    largestFace = info;
                }
            }
            currentExpression = largestFace.expression;
            return;
        }
        else
        {
            currentExpression = FaceInfo.Expression.Neutral;
            return;
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 960242e255f62c34e943f3a352559541
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,28 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Result of a single detected face: its expression, gender, estimated age,
/// detection confidence and bounding box.
/// </summary>
public class FaceInfo
{
    /// <summary>Recognized facial expression categories.</summary>
    public enum Expression
    {
        Neutral = 0,
        Happy = 1,
        Sad = 2,
        Disgust = 3,
        // NOTE(review): misspelling of "Surprised" kept intentionally — existing
        // callers reference this member name.
        Suprised = 4,
        Fearful = 5,
        Angry = 6
    }

    /// <summary>Detected gender; N means not determined.</summary>
    public enum Gender
    {
        N = 0,
        Male = 1,
        Female = 2
    }

    public Expression expression;
    public Gender gender;
    public int age;
    public float confidence;
    // Bounding box of the face in the source image.
    public Rect box;
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fbbadf751503a8b44b88b05bb5798624
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,463 @@
#if !UNITY_WSA_10_0
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgcodecsModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVForUnityExample.DnnModel;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
namespace OpenCVForUnityExample
{
/// <summary>
/// Facial Expression Recognition Example
/// An example of using OpenCV dnn module with Facial Expression Recognition.
/// Referring to https://github.com/opencv/opencv_zoo/tree/master/models/facial_expression_recognition
/// </summary>
[RequireComponent(typeof(UVCCameraToMatHelper))]
// NOTE(review): near-duplicate of MFacialExpressionRecognition — only the camera helper type
// (UVCCameraToMatHelper vs WebCamTextureToMatHelper) differs; consider extracting a shared base.
public class FacialExpressionRecognition_UVC : MonoBehaviour,IFaceRecognizer
{
[Header("TEST")]
[TooltipAttribute("Path to test input image.")]
public string testInputImage;
/// <summary>
/// The texture the camera frames (with visualizations) are rendered into.
/// </summary>
Texture2D texture;
/// <summary>
/// The UVC camera to mat helper (external USB camera source).
/// </summary>
UVCCameraToMatHelper usbCamTextureToMatHelper;
/// <summary>
/// BGR working mat reused every frame (allocated on camera init).
/// </summary>
Mat bgrMat;
/// <summary>
/// The facial expression recognizer.
/// </summary>
MFacialExpressionRecognizer facialExpressionRecognizer;
/// <summary>
/// The FPS monitor (optional; may be null if the component is absent).
/// </summary>
FpsMonitor fpsMonitor;
/// <summary>
/// FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME
/// </summary>
protected static readonly string FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME = "OpenCVForUnity/dnn/facial_expression_recognition_mobilefacenet_2022july.onnx";
/// <summary>
/// The facial expression recognition model filepath.
/// </summary>
string facial_expression_recognition_model_filepath;
/// <summary>
/// FACE_RECOGNITION_MODEL_FILENAME
/// </summary>
protected static readonly string FACE_RECOGNITION_MODEL_FILENAME = "OpenCVForUnity/dnn/face_recognition_sface_2021dec.onnx";
/// <summary>
/// The face recognition model filepath.
/// </summary>
string face_recognition_model_filepath;
/// <summary>
/// The YuNetV2FaceDetector.
/// </summary>
YuNetV2FaceDetector faceDetector;
// Face detector input size and post-filtering parameters.
int inputSizeW = 320;
int inputSizeH = 320;
float scoreThreshold = 0.9f;
float nmsThreshold = 0.3f;
int topK = 5000;
/// <summary>
/// FACE_DETECTION_MODEL_FILENAME
/// </summary>
protected static readonly string FACE_DETECTION_MODEL_FILENAME = "OpenCVForUnity/dnn/face_detection_yunet_2023mar.onnx";
/// <summary>
/// The face detection model filepath.
/// </summary>
string face_detection_model_filepath;
// Raised once per processed frame with the recognized faces.
// NOTE(review): "Reconized" is a typo for "Recognized" (private member; safe to rename later).
event Action<FaceInfo[]> onReconizedFaces;
#if UNITY_WEBGL
IEnumerator getFilePath_Coroutine;
#endif
// Use this for initialization
void Start()
{
fpsMonitor = GetComponent<FpsMonitor>();
usbCamTextureToMatHelper = gameObject.GetComponent<UVCCameraToMatHelper>();
#if UNITY_WEBGL
// WebGL cannot read StreamingAssets synchronously; resolve model paths via coroutine.
getFilePath_Coroutine = GetFilePath();
StartCoroutine(getFilePath_Coroutine);
#else
face_detection_model_filepath = Utils.getFilePath(FACE_DETECTION_MODEL_FILENAME);
facial_expression_recognition_model_filepath = Utils.getFilePath(FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME);
face_recognition_model_filepath = Utils.getFilePath(FACE_RECOGNITION_MODEL_FILENAME);
Run();
#endif
}
#if UNITY_WEBGL
// Sequentially resolves the three model file paths, then runs setup.
private IEnumerator GetFilePath()
{
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync(FACE_DETECTION_MODEL_FILENAME, (result) =>
{
face_detection_model_filepath = result;
});
yield return getFilePathAsync_0_Coroutine;
var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync(FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME, (result) =>
{
facial_expression_recognition_model_filepath = result;
});
yield return getFilePathAsync_1_Coroutine;
var getFilePathAsync_2_Coroutine = Utils.getFilePathAsync(FACE_RECOGNITION_MODEL_FILENAME, (result) =>
{
face_recognition_model_filepath = result;
});
yield return getFilePathAsync_2_Coroutine;
getFilePath_Coroutine = null;
Run();
}
#endif
// Creates the detector/recognizer from the resolved model paths and starts the
// camera (or, when testInputImage is set, runs a one-shot test on a still image).
void Run()
{
//if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
Utils.setDebugMode(true);
if (string.IsNullOrEmpty(face_detection_model_filepath))
{
Debug.LogError(FACE_DETECTION_MODEL_FILENAME + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
}
else
{
faceDetector = new YuNetV2FaceDetector(face_detection_model_filepath, "", new Size(inputSizeW, inputSizeH), scoreThreshold, nmsThreshold, topK);
}
if (string.IsNullOrEmpty(facial_expression_recognition_model_filepath) || string.IsNullOrEmpty(face_recognition_model_filepath))
{
Debug.LogError(FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME + " or " + FACE_RECOGNITION_MODEL_FILENAME + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
}
else
{
facialExpressionRecognizer = new MFacialExpressionRecognizer(facial_expression_recognition_model_filepath, face_recognition_model_filepath, "");
}
if (string.IsNullOrEmpty(testInputImage))
{
#if UNITY_ANDROID && !UNITY_EDITOR
// Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
usbCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
usbCamTextureToMatHelper.Initialize();
}
else
{
/////////////////////
// TEST
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync("OpenCVForUnity/dnn/" + testInputImage, (result) =>
{
string test_input_image_filepath = result;
if (string.IsNullOrEmpty(test_input_image_filepath)) Debug.Log("The file:" + testInputImage + " did not exist in the folder “Assets/StreamingAssets/OpenCVForUnity/dnn”.");
Mat img = Imgcodecs.imread(test_input_image_filepath);
if (img.empty())
{
// Fall back to a black placeholder with an error message baked in.
img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
Imgproc.putText(img, testInputImage + " is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
}
else
{
TickMeter tm = new TickMeter();
tm.start();
Mat faces = faceDetector.infer(img);
tm.stop();
Debug.Log("YuNetFaceDetector Inference time, ms: " + tm.getTimeMilli());
List<Mat> expressions = new List<Mat>();
// Estimate the expression of each face
for (int i = 0; i < faces.rows(); ++i)
{
tm.reset();
tm.start();
// Facial expression recognizer inference
Mat facialExpression = facialExpressionRecognizer.infer(img, faces.row(i));
tm.stop();
Debug.Log("FacialExpressionRecognizer Inference time (preprocess + infer + postprocess), ms: " + tm.getTimeMilli());
if (!facialExpression.empty())
expressions.Add(facialExpression);
}
faceDetector.visualize(img, faces, true, false);
facialExpressionRecognizer.visualize(img, expressions, faces, true, false);
}
// Fit the quad and the orthographic camera to the image aspect ratio.
gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
float imageWidth = img.width();
float imageHeight = img.height();
float widthScale = (float)Screen.width / imageWidth;
float heightScale = (float)Screen.height / imageHeight;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = imageHeight / 2;
}
Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGB24, false);
Utils.matToTexture2D(img, texture);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
});
StartCoroutine(getFilePathAsync_0_Coroutine);
/////////////////////
}
}
/// <summary>
/// Raises the webcam texture to mat helper initialized event.
/// Allocates the display texture / bgr mat and fits the camera to the frame size.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = usbCamTextureToMatHelper.GetMat();
texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
Utils.matToTexture2D(webCamTextureMat, texture);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
if (fpsMonitor != null)
{
fpsMonitor.Add("width", webCamTextureMat.width().ToString());
fpsMonitor.Add("height", webCamTextureMat.height().ToString());
fpsMonitor.Add("orientation", Screen.orientation.ToString());
}
float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = height / 2;
}
bgrMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
}
/// <summary>
/// Raises the webcam texture to mat helper disposed event; releases per-camera resources.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
if (bgrMat != null)
bgrMat.Dispose();
if (texture != null)
{
Texture2D.Destroy(texture);
texture = null;
}
}
/// <summary>
/// Raises the webcam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Per-frame pipeline: detect faces, classify expressions, draw overlays, notify listeners.
void Update()
{
if (usbCamTextureToMatHelper.IsPlaying() && usbCamTextureToMatHelper.DidUpdateThisFrame())
{
Mat rgbaMat = usbCamTextureToMatHelper.GetMat();
if (faceDetector == null || facialExpressionRecognizer == null)
{
Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
}
else
{
Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);
//TickMeter tm = new TickMeter();
//tm.start();
Mat faces = faceDetector.infer(bgrMat);
//tm.stop();
//Debug.Log("YuNetFaceDetector Inference time, ms: " + tm.getTimeMilli());
List<Mat> expressions = new List<Mat>();
// Estimate the expression of each face
for (int i = 0; i < faces.rows(); ++i)
{
//tm.reset();
//tm.start();
// Facial expression recognizer inference
Mat facialExpression = facialExpressionRecognizer.infer(bgrMat, faces.row(i));
//tm.stop();
//Debug.Log("FacialExpressionRecognizer Inference time (preprocess + infer + postprocess), ms: " + tm.getTimeMilli());
if (!facialExpression.empty())
expressions.Add(facialExpression);
}
Imgproc.cvtColor(bgrMat, rgbaMat, Imgproc.COLOR_BGR2RGBA);
//faceDetector.visualize(rgbaMat, faces, false, true);
FaceInfo[] infos = facialExpressionRecognizer.visualize(rgbaMat, expressions, faces, true);
if (onReconizedFaces != null)
onReconizedFaces(infos);
}
Utils.matToTexture2D(rgbaMat, texture);
}
}
/// <summary>
/// Raises the destroy event; disposes the camera helper and both models.
/// </summary>
void OnDestroy()
{
usbCamTextureToMatHelper.Dispose();
if (faceDetector != null)
faceDetector.dispose();
if (facialExpressionRecognizer != null)
facialExpressionRecognizer.dispose();
Utils.setDebugMode(false);
#if UNITY_WEBGL
if (getFilePath_Coroutine != null)
{
StopCoroutine(getFilePath_Coroutine);
((IDisposable)getFilePath_Coroutine).Dispose();
}
#endif
}
/// <summary>
/// Raises the back button click event.
/// </summary>
public void OnBackButtonClick()
{
SceneManager.LoadScene("OpenCVForUnityExample");
}
/// <summary>
/// Raises the play button click event.
/// </summary>
public void OnPlayButtonClick()
{
usbCamTextureToMatHelper.Play();
}
/// <summary>
/// Raises the pause button click event.
/// </summary>
public void OnPauseButtonClick()
{
usbCamTextureToMatHelper.Pause();
}
/// <summary>
/// Raises the stop button click event.
/// </summary>
public void OnStopButtonClick()
{
usbCamTextureToMatHelper.Stop();
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick()
{
usbCamTextureToMatHelper.requestedIsFrontFacing = !usbCamTextureToMatHelper.requestedIsFrontFacing;
}
// NOTE(review): not implemented — consumers currently receive results via BindListener;
// confirm whether this synchronous accessor is still needed on the interface's callers.
public FaceInfo[] GetFaceInfos()
{
throw new NotImplementedException();
}
/// <summary>
/// Subscribes a handler to per-frame recognition results.
/// </summary>
public void BindListener(Action<FaceInfo[]> onRecognizedHandler)
{
onReconizedFaces += onRecognizedHandler;
}
/// <summary>
/// Unsubscribes a previously bound handler.
/// </summary>
public void RemoveListener(Action<FaceInfo[]> onRecognizedHandler)
{
onReconizedFaces -= onRecognizedHandler;
}
}
}
#endif

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a07b9addfb2161e4b8fbdc35b3ed9531
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,10 @@
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Contract for components that publish face recognition results
/// (detected boxes plus classified expressions) to interested listeners.
/// </summary>
public interface IFaceRecognizer
{
/// <summary>
/// Subscribes <paramref name="onRecognizedHandler"/> to recognition results.
/// </summary>
void BindListener(Action<FaceInfo[]> onRecognizedHandler);
/// <summary>
/// Unsubscribes a handler previously passed to <see cref="BindListener"/>.
/// </summary>
void RemoveListener(Action<FaceInfo[]> onRecognizedHandler);
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a7b970a0e0890864ebc966baf1686d13
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,458 @@
#if !UNITY_WSA_10_0
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgcodecsModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVForUnityExample.DnnModel;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
namespace OpenCVForUnityExample
{
/// <summary>
/// Facial Expression Recognition Example
/// An example of using OpenCV dnn module with Facial Expression Recognition.
/// Referring to https://github.com/opencv/opencv_zoo/tree/master/models/facial_expression_recognition
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
// NOTE(review): near-duplicate of FacialExpressionRecognition_UVC — only the camera helper
// type differs; consider extracting a shared base to remove the duplication.
public class MFacialExpressionRecognition : MonoBehaviour,IFaceRecognizer
{
[Header("TEST")]
[TooltipAttribute("Path to test input image.")]
public string testInputImage;
/// <summary>
/// The texture the camera frames (with visualizations) are rendered into.
/// </summary>
Texture2D texture;
/// <summary>
/// The webcam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// BGR working mat reused every frame (allocated on camera init).
/// </summary>
Mat bgrMat;
/// <summary>
/// The facial expression recognizer.
/// </summary>
MFacialExpressionRecognizer facialExpressionRecognizer;
/// <summary>
/// The FPS monitor (optional; may be null if the component is absent).
/// </summary>
FpsMonitor fpsMonitor;
/// <summary>
/// FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME
/// </summary>
protected static readonly string FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME = "OpenCVForUnity/dnn/facial_expression_recognition_mobilefacenet_2022july.onnx";
/// <summary>
/// The facial expression recognition model filepath.
/// </summary>
string facial_expression_recognition_model_filepath;
/// <summary>
/// FACE_RECOGNITION_MODEL_FILENAME
/// </summary>
protected static readonly string FACE_RECOGNITION_MODEL_FILENAME = "OpenCVForUnity/dnn/face_recognition_sface_2021dec.onnx";
/// <summary>
/// The face recognition model filepath.
/// </summary>
string face_recognition_model_filepath;
/// <summary>
/// The YuNetV2FaceDetector.
/// </summary>
YuNetV2FaceDetector faceDetector;
// Face detector input size and post-filtering parameters.
int inputSizeW = 320;
int inputSizeH = 320;
float scoreThreshold = 0.9f;
float nmsThreshold = 0.3f;
int topK = 5000;
/// <summary>
/// FACE_DETECTION_MODEL_FILENAME
/// </summary>
protected static readonly string FACE_DETECTION_MODEL_FILENAME = "OpenCVForUnity/dnn/face_detection_yunet_2023mar.onnx";
/// <summary>
/// The face detection model filepath.
/// </summary>
string face_detection_model_filepath;
// Raised once per processed frame with the recognized faces.
// NOTE(review): "Reconized" is a typo for "Recognized" (private member; safe to rename later).
event Action<FaceInfo[]> onReconizedFaces;
#if UNITY_WEBGL
IEnumerator getFilePath_Coroutine;
#endif
// Use this for initialization
void Start()
{
fpsMonitor = GetComponent<FpsMonitor>();
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
#if UNITY_WEBGL
// WebGL cannot read StreamingAssets synchronously; resolve model paths via coroutine.
getFilePath_Coroutine = GetFilePath();
StartCoroutine(getFilePath_Coroutine);
#else
face_detection_model_filepath = Utils.getFilePath(FACE_DETECTION_MODEL_FILENAME);
facial_expression_recognition_model_filepath = Utils.getFilePath(FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME);
face_recognition_model_filepath = Utils.getFilePath(FACE_RECOGNITION_MODEL_FILENAME);
Run();
#endif
}
#if UNITY_WEBGL
// Sequentially resolves the three model file paths, then runs setup.
private IEnumerator GetFilePath()
{
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync(FACE_DETECTION_MODEL_FILENAME, (result) =>
{
face_detection_model_filepath = result;
});
yield return getFilePathAsync_0_Coroutine;
var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync(FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME, (result) =>
{
facial_expression_recognition_model_filepath = result;
});
yield return getFilePathAsync_1_Coroutine;
var getFilePathAsync_2_Coroutine = Utils.getFilePathAsync(FACE_RECOGNITION_MODEL_FILENAME, (result) =>
{
face_recognition_model_filepath = result;
});
yield return getFilePathAsync_2_Coroutine;
getFilePath_Coroutine = null;
Run();
}
#endif
// Creates the detector/recognizer from the resolved model paths and starts the
// camera (or, when testInputImage is set, runs a one-shot test on a still image).
void Run()
{
//if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
Utils.setDebugMode(true);
if (string.IsNullOrEmpty(face_detection_model_filepath))
{
Debug.LogError(FACE_DETECTION_MODEL_FILENAME + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
}
else
{
faceDetector = new YuNetV2FaceDetector(face_detection_model_filepath, "", new Size(inputSizeW, inputSizeH), scoreThreshold, nmsThreshold, topK);
}
if (string.IsNullOrEmpty(facial_expression_recognition_model_filepath) || string.IsNullOrEmpty(face_recognition_model_filepath))
{
Debug.LogError(FACIAL_EXPRESSION_RECOGNITION_MODEL_FILENAME + " or " + FACE_RECOGNITION_MODEL_FILENAME + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
}
else
{
facialExpressionRecognizer = new MFacialExpressionRecognizer(facial_expression_recognition_model_filepath, face_recognition_model_filepath, "");
}
if (string.IsNullOrEmpty(testInputImage))
{
#if UNITY_ANDROID && !UNITY_EDITOR
// Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
webCamTextureToMatHelper.Initialize();
}
else
{
/////////////////////
// TEST
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync("OpenCVForUnity/dnn/" + testInputImage, (result) =>
{
string test_input_image_filepath = result;
if (string.IsNullOrEmpty(test_input_image_filepath)) Debug.Log("The file:" + testInputImage + " did not exist in the folder “Assets/StreamingAssets/OpenCVForUnity/dnn”.");
Mat img = Imgcodecs.imread(test_input_image_filepath);
if (img.empty())
{
// Fall back to a black placeholder with an error message baked in.
img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
Imgproc.putText(img, testInputImage + " is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
}
else
{
TickMeter tm = new TickMeter();
tm.start();
Mat faces = faceDetector.infer(img);
tm.stop();
Debug.Log("YuNetFaceDetector Inference time, ms: " + tm.getTimeMilli());
List<Mat> expressions = new List<Mat>();
// Estimate the expression of each face
for (int i = 0; i < faces.rows(); ++i)
{
tm.reset();
tm.start();
// Facial expression recognizer inference
Mat facialExpression = facialExpressionRecognizer.infer(img, faces.row(i));
tm.stop();
Debug.Log("FacialExpressionRecognizer Inference time (preprocess + infer + postprocess), ms: " + tm.getTimeMilli());
if (!facialExpression.empty())
expressions.Add(facialExpression);
}
faceDetector.visualize(img, faces, true, false);
facialExpressionRecognizer.visualize(img, expressions, faces, true, false);
}
// Fit the quad and the orthographic camera to the image aspect ratio.
gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
float imageWidth = img.width();
float imageHeight = img.height();
float widthScale = (float)Screen.width / imageWidth;
float heightScale = (float)Screen.height / imageHeight;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = imageHeight / 2;
}
Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGB24, false);
Utils.matToTexture2D(img, texture);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
});
StartCoroutine(getFilePathAsync_0_Coroutine);
/////////////////////
}
}
/// <summary>
/// Raises the webcam texture to mat helper initialized event.
/// Allocates the display texture / bgr mat and fits the camera to the frame size.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
Utils.matToTexture2D(webCamTextureMat, texture);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
if (fpsMonitor != null)
{
fpsMonitor.Add("width", webCamTextureMat.width().ToString());
fpsMonitor.Add("height", webCamTextureMat.height().ToString());
fpsMonitor.Add("orientation", Screen.orientation.ToString());
}
float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = height / 2;
}
bgrMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
}
/// <summary>
/// Raises the webcam texture to mat helper disposed event; releases per-camera resources.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
if (bgrMat != null)
bgrMat.Dispose();
if (texture != null)
{
Texture2D.Destroy(texture);
texture = null;
}
}
/// <summary>
/// Raises the webcam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Per-frame pipeline: detect faces, classify expressions, draw overlays, notify listeners.
void Update()
{
if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
{
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
if (faceDetector == null || facialExpressionRecognizer == null)
{
Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
}
else
{
Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);
//TickMeter tm = new TickMeter();
//tm.start();
Mat faces = faceDetector.infer(bgrMat);
//tm.stop();
//Debug.Log("YuNetFaceDetector Inference time, ms: " + tm.getTimeMilli());
List<Mat> expressions = new List<Mat>();
// Estimate the expression of each face
for (int i = 0; i < faces.rows(); ++i)
{
//tm.reset();
//tm.start();
// Facial expression recognizer inference
Mat facialExpression = facialExpressionRecognizer.infer(bgrMat, faces.row(i));
//tm.stop();
//Debug.Log("FacialExpressionRecognizer Inference time (preprocess + infer + postprocess), ms: " + tm.getTimeMilli());
if (!facialExpression.empty())
expressions.Add(facialExpression);
}
Imgproc.cvtColor(bgrMat, rgbaMat, Imgproc.COLOR_BGR2RGBA);
//faceDetector.visualize(rgbaMat, faces, false, true);
FaceInfo[] infos = facialExpressionRecognizer.visualize(rgbaMat, expressions, faces, true);
if (onReconizedFaces != null)
onReconizedFaces(infos);
}
Utils.matToTexture2D(rgbaMat, texture);
}
}
/// <summary>
/// Raises the destroy event; disposes the camera helper and both models.
/// </summary>
void OnDestroy()
{
webCamTextureToMatHelper.Dispose();
if (faceDetector != null)
faceDetector.dispose();
if (facialExpressionRecognizer != null)
facialExpressionRecognizer.dispose();
Utils.setDebugMode(false);
#if UNITY_WEBGL
if (getFilePath_Coroutine != null)
{
StopCoroutine(getFilePath_Coroutine);
((IDisposable)getFilePath_Coroutine).Dispose();
}
#endif
}
/// <summary>
/// Raises the back button click event.
/// </summary>
public void OnBackButtonClick()
{
SceneManager.LoadScene("OpenCVForUnityExample");
}
/// <summary>
/// Raises the play button click event.
/// </summary>
public void OnPlayButtonClick()
{
webCamTextureToMatHelper.Play();
}
/// <summary>
/// Raises the pause button click event.
/// </summary>
public void OnPauseButtonClick()
{
webCamTextureToMatHelper.Pause();
}
/// <summary>
/// Raises the stop button click event.
/// </summary>
public void OnStopButtonClick()
{
webCamTextureToMatHelper.Stop();
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick()
{
webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.requestedIsFrontFacing;
}
/// <summary>
/// Subscribes a handler to per-frame recognition results.
/// </summary>
public void BindListener(Action<FaceInfo[]> onRecognizedHandler)
{
onReconizedFaces += onRecognizedHandler;
}
/// <summary>
/// Unsubscribes a previously bound handler.
/// </summary>
public void RemoveListener(Action<FaceInfo[]> onRecognizedHandler)
{
onReconizedFaces -= onRecognizedHandler;
}
}
}
#endif

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 956d8fd069f661f4aa9eb1055d305572
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,127 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.DnnModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnityExample.DnnModel;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using UnityEngine;
/// <summary>
/// Extends FacialExpressionRecognizer with a visualize overload that, in addition to
/// drawing boxes and labels, returns a FaceInfo per face (expression, confidence, box).
/// </summary>
public class MFacialExpressionRecognizer : FacialExpressionRecognizer
{
    // One color per expression class id, in RGBA order (swapped to BGRA when isRGB is false).
    List<Scalar> mPalette = new List<Scalar>();

    public MFacialExpressionRecognizer(string modelFilepath, string SF_modelFilepath, string SF_configFilepath, int backend = Dnn.DNN_BACKEND_OPENCV, int target = Dnn.DNN_TARGET_CPU) : base(modelFilepath, SF_modelFilepath, SF_configFilepath, backend, target)
    {
        mPalette = new List<Scalar>();
        mPalette.Add(new Scalar(255, 56, 56, 255));
        mPalette.Add(new Scalar(82, 0, 133, 255));
        mPalette.Add(new Scalar(52, 69, 147, 255));
        mPalette.Add(new Scalar(255, 178, 29, 255));
        mPalette.Add(new Scalar(55, 55, 55, 255));
        mPalette.Add(new Scalar(100, 115, 255, 255));
        mPalette.Add(new Scalar(255, 112, 31, 255));
    }

    /// <summary>
    /// Draws a box and expression label for each face onto <paramref name="image"/>
    /// and returns one FaceInfo per classified face.
    /// </summary>
    /// <param name="image">Image the overlays are drawn into (modified in place).</param>
    /// <param name="results">Per-face classification result mats; must match faces.rows().</param>
    /// <param name="faces">Detector output; row i holds [x, y, w, h, ...] for face i.</param>
    /// <param name="isRGB">True if the image channel order is RGB(A); false for BGR(A).</param>
    /// <returns>One FaceInfo per face, or an empty array if inputs are unusable.</returns>
    public FaceInfo[] visualize(Mat image, List<Mat> results, Mat faces, bool isRGB = false)
    {
        if (image.IsDisposed)
            return new FaceInfo[0];
        if (results.Count != faces.rows())
            return new FaceInfo[0];
        FaceInfo[] faceInfos = new FaceInfo[results.Count];
        for (int i = 0; i < results.Count; ++i)
        {
            // Shrink the detector box by 2px on each side for drawing.
            float[] face_box = new float[4];
            faces.get(i, 0, face_box);
            float left = face_box[0] + 2;
            float top = face_box[1] + 2;
            float right = face_box[0] + face_box[2] - 2;
            float bottom = face_box[1] + face_box[3] - 2;
            ClassificationData bmData = getBestMatchData(results[i]);
            int classId = (int)bmData.cls;
            string label = getClassLabel(bmData.cls) + ", " + String.Format("{0:0.0000}", bmData.conf);
            Scalar c = mPalette[classId % mPalette.Count];
            Scalar color = isRGB ? c : new Scalar(c.val[2], c.val[1], c.val[0], c.val[3]);
            // draw box
            Imgproc.rectangle(image, new Point(left, top), new Point(right, bottom), color, 2);
            // Build the result from the unmodified face coordinates.
            // BUGFIX: the previous code used the label-clamped 'top' and constructed
            // Rect(left, bottom, width, top - bottom), yielding a negative height.
            FaceInfo info = new FaceInfo();
            info.expression = getClassExp(bmData.cls);
            info.confidence = bmData.conf;
            info.box = new UnityEngine.Rect(left, top, right - left, bottom - top);
            faceInfos[i] = info;
            // draw label (clamped so the text stays inside the image at the top edge)
            int[] baseLine = new int[1];
            Size labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);
            float labelTop = Mathf.Max(top, (float)labelSize.height);
            Imgproc.rectangle(image, new Point(left, labelTop + 2),
                new Point(left + labelSize.width, labelTop + labelSize.height + baseLine[0] + 2), color, Core.FILLED);
            Imgproc.putText(image, label, new Point(left, labelTop + labelSize.height + 2), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, Scalar.all(255), 1, Imgproc.LINE_AA);
        }
        return faceInfos;
    }

    /// <summary>
    /// Maps a class id (angry, disgust, fearful, happy, neutral, sad, surprised)
    /// to the corresponding FaceInfo.Expression; unknown ids fall back to Neutral.
    /// </summary>
    public FaceInfo.Expression getClassExp(float id)
    {
        switch ((int)id)
        {
            case 0:
                return FaceInfo.Expression.Angry;
            case 1:
                return FaceInfo.Expression.Disgust;
            case 2:
                return FaceInfo.Expression.Fearful;
            case 3:
                return FaceInfo.Expression.Happy;
            case 4:
                return FaceInfo.Expression.Neutral;
            case 5:
                return FaceInfo.Expression.Sad;
            case 6:
                return FaceInfo.Expression.Suprised;
            default:
                return FaceInfo.Expression.Neutral;
        }
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 847d49fa864db24469ce713456829df2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 84a7daacad6b09f48b400beb5b4ddb06
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,256 @@
using OpenCVForUnity.UnityUtils;
using Serenegiant.UVC;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// Bridges a Serenegiant UVC (USB) camera preview into a reusable Texture2D,
// applying a configurable scale / center-crop loaded from PlayerPrefs.
public class UVCTexture : MonoBehaviour, IUVCDrawer
{
/// <summary>
/// Crop factor applied to the preview size (lazily loaded from PlayerPrefs "clipArea", default 0.5).
/// </summary>
public float textureClipArea
{
get
{
if(_textureClipArea<=0)
{
_textureClipArea = PlayerPrefs.GetFloat("clipArea", 0.5f);
}
return _textureClipArea;
}
}
float _textureClipArea = -1;
/// <summary>
/// Horizontal crop offset (lazily loaded from PlayerPrefs "clipOffsetX", default 0).
/// </summary>
public float clipOffsetX
{
get
{
if(_clipOffsetX < -1)
{
_clipOffsetX = PlayerPrefs.GetFloat("clipOffsetX", 0);
}
return _clipOffsetX;
}
}
float _clipOffsetX = -2;
/// <summary>
/// Vertical crop offset (lazily loaded from PlayerPrefs "clipOffsetY", default 0).
/// </summary>
public float clipOffsetY
{
get
{
if (_clipOffsetY < -1)
{
_clipOffsetY = PlayerPrefs.GetFloat("clipOffsetY", 0);
}
return _clipOffsetY;
}
}
float _clipOffsetY = -2;
// Height of the cropped preview texture in pixels.
public int height
{
get
{
return texture.height;
}
}
// Width of the cropped preview texture in pixels.
public int width
{
get
{
return texture.width;
}
}
// NOTE(review): _requestedFPS is never read inside this class and has no default — confirm callers set it.
public float requestedFPS
{
get
{
return _requestedFPS;
}
set
{
_requestedFPS = value;
}
}
float _requestedFPS;
public bool didUpdateThisFrame
{
get
{
return _didUpdateThisFrame;
}
}
// NOTE(review): set to true once in HandleOnStartPreview and never reset per frame
// (the per-frame throttling below is commented out) — confirm consumers expect this.
bool _didUpdateThisFrame = false;
/// <summary>
/// Image scale factor; best left unscaled (1) — downscaling degrades recognition quality.
/// </summary>
[Range(0.3f,1)]
public float sizeScale = 0.5f;
public bool isPlaying
{
get
{
return _isPlaying;
}
}
bool _isPlaying;
/// <summary>
/// Whether the camera is front-facing.
/// </summary>
public bool isFrontFacing = false;
// Device filters deciding which attached UVC devices this drawer accepts.
public UVCFilter[] UVCFilters;
private const string TAG = "UVCDrawer#";
UVCManager uvcManager;
// The cropped/scaled preview as a Texture2D (allocated on preview start).
public Texture2D texture
{
get
{
return camTexture;
}
}
Texture2D camTexture;
// IUVCDrawer: accept only devices matching the configured filters.
public bool CanDraw(UVCManager manager, UVCDevice device)
{
return UVCFilter.Match(device, UVCFilters);
}
// IUVCDrawer: reject Ricoh devices other than THETA, then apply the filters.
public bool OnUVCAttachEvent(UVCManager manager, UVCDevice device)
{
var result = !device.IsRicoh || device.IsTHETA;
result &= UVCFilter.Match(device, UVCFilters);
return result;
}
public void OnUVCDetachEvent(UVCManager manager, UVCDevice device)
{
Debug.Log("uvc detach event");
}
public void OnUVCStartEvent(UVCManager manager, UVCDevice device, Texture tex)
{
uvcManager = manager;
HandleOnStartPreview(tex);
}
public void OnUVCStopEvent(UVCManager manager, UVCDevice device)
{
HandleOnStopPreview();
}
// Allocates camTexture at the scaled+cropped preview size and marks the stream live.
private void HandleOnStartPreview(Texture tex)
{
int width = (int)(tex.width * sizeScale * textureClipArea);
int height = (int)(tex.height * sizeScale * textureClipArea);
camTexture = new Texture2D(width, height, TextureFormat.RGB24, false);// (Texture2D)tex;
Debug.Log("uvc texture start");
_isPlaying = true;
_didUpdateThisFrame = true;
}
// Copies the first attached device's preview into camTexture every frame.
void Update()
{
if (uvcManager != null)
{
var devices = uvcManager.GetAttachedDevices();
if(devices.Count > 0)
{
textureToTexture2D(devices[0].previewTexture, camTexture);
}
}
//if (uvcManager != null)
//{
// camTexture = (Texture2D)uvcManager.GetAttachedDevices()[0].previewTexture;
// FindObjectOfType<RawImage>().texture = camTexture;
//}
//if (camTexture != null)
//{
// frameTimer += Time.deltaTime;
// float fTime = 1 / FPS;
// if (frameTimer >= fTime)
// {
// _didUpdateThisFrame = true;
// frameTimer -= fTime;
// }
// else
// {
// _didUpdateThisFrame = false;
// }
//}
}
private void HandleOnStopPreview()
{
Debug.Log("uvc stop preview");
}
// NOTE(review): Play/Pause are intentionally no-ops; Stop only logs — confirm callers don't rely on them.
public void Play()
{
}
public void Pause()
{
}
public void Stop()
{
Debug.Log("uvc texture stop");
}
/// <summary>
/// Copies <paramref name="texture"/> into <paramref name="texture2D"/>.
/// RenderTextures are read back 1:1; other textures are blitted to a temporary
/// RenderTexture scaled by sizeScale, then a centered crop of textureClipArea
/// (shifted by clipOffsetX/Y) is read back.
/// </summary>
public void textureToTexture2D(Texture texture,Texture2D texture2D)
{
if (texture == null)
throw new ArgumentNullException("texture");
if (texture2D == null)
throw new ArgumentNullException("texture2D");
//if (texture.width != texture2D.width || texture.height != texture2D.height)
// throw new ArgumentException("texture and texture2D need to be the same size.");
RenderTexture prevRT = RenderTexture.active;
if (texture is RenderTexture)
{
RenderTexture.active = (RenderTexture)texture;
texture2D.ReadPixels(new UnityEngine.Rect(0f, 0f, texture.width, texture.height), 0, 0, false);
texture2D.Apply(false, false);
}
else
{
int width = (int)(texture.width * sizeScale);
int height = (int)(texture.height * sizeScale);
RenderTexture tempRT = RenderTexture.GetTemporary(width, height, 0, RenderTextureFormat.ARGB32);
Graphics.Blit(texture, tempRT);
RenderTexture.active = tempRT;
// Centered crop rectangle; offsets shift it within the scaled frame.
Rect rect = new Rect(0, 0, width * textureClipArea, height * textureClipArea);
rect.x = (width - rect.width) * 0.5f * (1 + clipOffsetX);
rect.y = (height - rect.height) * 0.5f * (1 + clipOffsetY);
texture2D.ReadPixels(rect, 0, 0, false);
texture2D.Apply(false, false);
RenderTexture.ReleaseTemporary(tempRT);
}
RenderTexture.active = prevRT;
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b48c16e283709764bb9d934a3f73886c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -40,4 +40,9 @@ public class RobotAnimControl : CharacterAnimControlBase
{
animator.SetTrigger("idle");
}
// Fires the "dance" trigger on the character's animator controller.
public override void Dance() => animator.SetTrigger("dance");
}

View File

@ -0,0 +1,40 @@
using System.Collections;
using System.Collections.Generic;
using TMPro;
using UnityEngine;
/// <summary>
/// Chat bubble display: shows a message and, for mapped expressions, appends a
/// TextMeshPro sprite tag so an emoji renders after the text.
/// </summary>
public class ChatBox : MonoBehaviour
{
    public TextMeshPro textMeshPro;

    /// <summary>
    /// Renders <paramref name="text"/> with an optional expression emoji.
    /// Neutral and Suprised (enum spelling as declared elsewhere) map to no emoji.
    /// </summary>
    public void SetText(string text, CarAssistant.Expression expression = CarAssistant.Expression.Neutral)
    {
        // Sprite-sheet index per expression; -1 means "no emoji appended".
        int emojiIndex;
        switch (expression)
        {
            case CarAssistant.Expression.Happy:
                emojiIndex = 5;
                break;
            case CarAssistant.Expression.Sad:
                emojiIndex = 15;
                break;
            case CarAssistant.Expression.Doubt:
                emojiIndex = 12;
                break;
            case CarAssistant.Expression.Smile:
                emojiIndex = 0;
                break;
            default:
                emojiIndex = -1;
                break;
        }
        if (emojiIndex >= 0)
        {
            text += $"<sprite={emojiIndex}>";
        }
        textMeshPro.text = text;
    }
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d7a321df2a6cc0143875c6264f77afea
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b1997c85496fbb041a9c98b22510ccbb
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,23 @@
fileFormatVersion: 2
guid: 1a832d6973b62754e8b3ed14465a278b
AudioImporter:
externalObjects: {}
serializedVersion: 7
defaultSettings:
serializedVersion: 2
loadType: 0
sampleRateSetting: 0
sampleRateOverride: 44100
compressionFormat: 0
quality: 1
conversionMode: 0
preloadAudioData: 0
platformSettingOverrides: {}
forceToMono: 0
normalize: 1
loadInBackground: 0
ambisonic: 0
3D: 1
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,23 @@
fileFormatVersion: 2
guid: 3c6e71f6457937b469d6f6fae991700c
AudioImporter:
externalObjects: {}
serializedVersion: 7
defaultSettings:
serializedVersion: 2
loadType: 0
sampleRateSetting: 0
sampleRateOverride: 44100
compressionFormat: 0
quality: 1
conversionMode: 0
preloadAudioData: 0
platformSettingOverrides: {}
forceToMono: 0
normalize: 1
loadInBackground: 0
ambisonic: 0
3D: 1
userData:
assetBundleName:
assetBundleVariant:

View File

@ -152,6 +152,33 @@ AnimatorStateTransition:
m_InterruptionSource: 0
m_OrderedInterruption: 1
m_CanTransitionToSelf: 1
--- !u!1102 &-3881533873121950449
AnimatorState:
serializedVersion: 6
m_ObjectHideFlags: 1
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: Robot Hip Hop Dance
m_Speed: 1
m_CycleOffset: 0
m_Transitions:
- {fileID: 5560828704810626401}
m_StateMachineBehaviours: []
m_Position: {x: 50, y: 50, z: 0}
m_IKOnFeet: 0
m_WriteDefaultValues: 1
m_Mirror: 0
m_SpeedParameterActive: 0
m_MirrorParameterActive: 0
m_CycleOffsetParameterActive: 0
m_TimeParameterActive: 0
m_Motion: {fileID: -203655887218126122, guid: 2a38c6a844640844baf14284c7046cee, type: 3}
m_Tag:
m_SpeedParameter:
m_MirrorParameter:
m_CycleOffsetParameter:
m_TimeParameter:
--- !u!1102 &-3752492594145465479
AnimatorState:
serializedVersion: 6
@ -201,6 +228,31 @@ AnimatorStateTransition:
m_InterruptionSource: 0
m_OrderedInterruption: 1
m_CanTransitionToSelf: 1
--- !u!1101 &-2049146169530471879
AnimatorStateTransition:
m_ObjectHideFlags: 1
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name:
m_Conditions:
- m_ConditionMode: 1
m_ConditionEvent: dance
m_EventTreshold: 0
m_DstStateMachine: {fileID: 0}
m_DstState: {fileID: -3881533873121950449}
m_Solo: 0
m_Mute: 0
m_IsExit: 0
serializedVersion: 3
m_TransitionDuration: 0.25
m_TransitionOffset: 0
m_ExitTime: 0.75
m_HasExitTime: 0
m_HasFixedDuration: 1
m_InterruptionSource: 0
m_OrderedInterruption: 1
m_CanTransitionToSelf: 1
--- !u!1101 &-1458154305767465992
AnimatorStateTransition:
m_ObjectHideFlags: 1
@ -240,43 +292,49 @@ AnimatorController:
m_DefaultFloat: 0
m_DefaultInt: 0
m_DefaultBool: 0
m_Controller: {fileID: 9100000}
m_Controller: {fileID: 0}
- m_Name: blink
m_Type: 9
m_DefaultFloat: 0
m_DefaultInt: 0
m_DefaultBool: 0
m_Controller: {fileID: 9100000}
m_Controller: {fileID: 0}
- m_Name: angry
m_Type: 9
m_DefaultFloat: 0
m_DefaultInt: 0
m_DefaultBool: 0
m_Controller: {fileID: 9100000}
m_Controller: {fileID: 0}
- m_Name: happy
m_Type: 9
m_DefaultFloat: 0
m_DefaultInt: 0
m_DefaultBool: 0
m_Controller: {fileID: 9100000}
m_Controller: {fileID: 0}
- m_Name: sad
m_Type: 9
m_DefaultFloat: 0
m_DefaultInt: 0
m_DefaultBool: 0
m_Controller: {fileID: 9100000}
m_Controller: {fileID: 0}
- m_Name: idle
m_Type: 9
m_DefaultFloat: 0
m_DefaultInt: 0
m_DefaultBool: 0
m_Controller: {fileID: 9100000}
m_Controller: {fileID: 0}
- m_Name: hi
m_Type: 9
m_DefaultFloat: 0
m_DefaultInt: 0
m_DefaultBool: 0
m_Controller: {fileID: 9100000}
m_Controller: {fileID: 0}
- m_Name: dance
m_Type: 9
m_DefaultFloat: 0
m_DefaultInt: 0
m_DefaultBool: 0
m_Controller: {fileID: 0}
m_AnimatorLayers:
- serializedVersion: 5
m_Name: Base Layer
@ -680,6 +738,28 @@ AnimatorState:
m_MirrorParameter:
m_CycleOffsetParameter:
m_TimeParameter:
--- !u!1101 &5560828704810626401
AnimatorStateTransition:
m_ObjectHideFlags: 1
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name:
m_Conditions: []
m_DstStateMachine: {fileID: 0}
m_DstState: {fileID: 0}
m_Solo: 0
m_Mute: 0
m_IsExit: 1
serializedVersion: 3
m_TransitionDuration: 0.25
m_TransitionOffset: 0
m_ExitTime: 0.9837838
m_HasExitTime: 1
m_HasFixedDuration: 1
m_InterruptionSource: 0
m_OrderedInterruption: 1
m_CanTransitionToSelf: 1
--- !u!1107 &5974105379841103025
AnimatorStateMachine:
serializedVersion: 6
@ -881,6 +961,9 @@ AnimatorStateMachine:
- serializedVersion: 1
m_State: {fileID: 3852747646935075765}
m_Position: {x: 550, y: 470, z: 0}
- serializedVersion: 1
m_State: {fileID: -3881533873121950449}
m_Position: {x: 550, y: 570, z: 0}
m_ChildStateMachines:
- serializedVersion: 1
m_StateMachine: {fileID: 5974105379841103025}
@ -889,6 +972,7 @@ AnimatorStateMachine:
- {fileID: -1458154305767465992}
- {fileID: -6492649378429786868}
- {fileID: 367154791439637850}
- {fileID: -2049146169530471879}
m_EntryTransitions: []
m_StateMachineTransitions:
- first: {fileID: 5974105379841103025}

View File

@ -1,23 +0,0 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Scene singleton base: lazily resolves the single instance of T by searching
/// the loaded scene on first access.
/// </summary>
/// <typeparam name="T">Concrete MonoBehaviour type of the singleton.</typeparam>
public class Singleton<T> : MonoBehaviour where T : MonoBehaviour
{
    static T _Instance;

    public static T Instance
    {
        get
        {
            // Uses Unity's overloaded equality (not ??=) on purpose, so a
            // destroyed instance is detected and re-resolved from the scene.
            if (_Instance != null)
            {
                return _Instance;
            }
            _Instance = FindObjectOfType<T>();
            return _Instance;
        }
    }
}

View File

@ -11,14 +11,14 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StompyRobot.SRDebugger", "S
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StompyRobot.SRF", "StompyRobot.SRF.csproj", "{D8E6FCCA-686A-0FF5-1956-9A7EE37839AA}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "com.richframe.frame", "com.richframe.frame.csproj", "{A1452F01-7562-84D8-FEFD-9FC63712C72A}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StompyRobot.SRF.Editor", "StompyRobot.SRF.Editor.csproj", "{90FE4C7C-EB1B-93AA-DA9C-CA7BF2F58D4B}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StompyRobot.SRDebugger.Editor", "StompyRobot.SRDebugger.Editor.csproj", "{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "com.richframe.frame", "com.richframe.frame.csproj", "{A1452F01-7562-84D8-FEFD-9FC63712C72A}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "com.richframe.test", "com.richframe.test.csproj", "{FE183B87-8612-2E98-8A90-F248AAF50D10}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StompyRobot.SRDebugger.Editor", "StompyRobot.SRDebugger.Editor.csproj", "{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EnoxSoftware.OpenCVForUnity.Editor", "EnoxSoftware.OpenCVForUnity.Editor.csproj", "{81B7A7AE-8EC2-866E-0C01-1CE46F30A594}"
EndProject
Global
@ -47,22 +47,22 @@ Global
{D8E6FCCA-686A-0FF5-1956-9A7EE37839AA}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D8E6FCCA-686A-0FF5-1956-9A7EE37839AA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D8E6FCCA-686A-0FF5-1956-9A7EE37839AA}.Release|Any CPU.Build.0 = Release|Any CPU
{A1452F01-7562-84D8-FEFD-9FC63712C72A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A1452F01-7562-84D8-FEFD-9FC63712C72A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A1452F01-7562-84D8-FEFD-9FC63712C72A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A1452F01-7562-84D8-FEFD-9FC63712C72A}.Release|Any CPU.Build.0 = Release|Any CPU
{90FE4C7C-EB1B-93AA-DA9C-CA7BF2F58D4B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{90FE4C7C-EB1B-93AA-DA9C-CA7BF2F58D4B}.Debug|Any CPU.Build.0 = Debug|Any CPU
{90FE4C7C-EB1B-93AA-DA9C-CA7BF2F58D4B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{90FE4C7C-EB1B-93AA-DA9C-CA7BF2F58D4B}.Release|Any CPU.Build.0 = Release|Any CPU
{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}.Release|Any CPU.Build.0 = Release|Any CPU
{A1452F01-7562-84D8-FEFD-9FC63712C72A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A1452F01-7562-84D8-FEFD-9FC63712C72A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A1452F01-7562-84D8-FEFD-9FC63712C72A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A1452F01-7562-84D8-FEFD-9FC63712C72A}.Release|Any CPU.Build.0 = Release|Any CPU
{FE183B87-8612-2E98-8A90-F248AAF50D10}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FE183B87-8612-2E98-8A90-F248AAF50D10}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FE183B87-8612-2E98-8A90-F248AAF50D10}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FE183B87-8612-2E98-8A90-F248AAF50D10}.Release|Any CPU.Build.0 = Release|Any CPU
{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D94325F2-C3DE-1CDD-02FC-08D656EC4C7A}.Release|Any CPU.Build.0 = Release|Any CPU
{81B7A7AE-8EC2-866E-0C01-1CE46F30A594}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{81B7A7AE-8EC2-866E-0C01-1CE46F30A594}.Debug|Any CPU.Build.0 = Debug|Any CPU
{81B7A7AE-8EC2-866E-0C01-1CE46F30A594}.Release|Any CPU.ActiveCfg = Release|Any CPU

View File

@ -5,8 +5,8 @@ AudioManager:
m_ObjectHideFlags: 0
serializedVersion: 2
m_Volume: 1
Rolloff Scale: 1
Doppler Factor: 1
Rolloff Scale: 0
Doppler Factor: 0
Default Speaker Mode: 2
m_SampleRate: 0
m_DSPBufferSize: 1024