新增打印层级功能

This commit is contained in:
terric 2023-11-24 13:13:10 +08:00
parent 541927fbdd
commit 2e0150979d
23 changed files with 254 additions and 170 deletions

View File

@ -540,7 +540,7 @@ namespace OpenCVCompact
}
}
if (webCamTexture == null)
Debug.Log("Cannot find camera device " + requestedDeviceName + ".");
LogPrint.Log("Cannot find camera device " + requestedDeviceName + ".");
}
if (webCamTexture == null)
@ -617,7 +617,7 @@ namespace OpenCVCompact
}
else if (webCamTexture.didUpdateThisFrame)
{
Debug.Log("WebCamTextureToMatHelper:: " + "devicename:" + webCamTexture.deviceName + " name:" + webCamTexture.name + " width:" + webCamTexture.width + " height:" + webCamTexture.height + " fps:" + webCamTexture.requestedFPS
LogPrint.Log("WebCamTextureToMatHelper:: " + "devicename:" + webCamTexture.deviceName + " name:" + webCamTexture.name + " width:" + webCamTexture.width + " height:" + webCamTexture.height + " fps:" + webCamTexture.requestedFPS
+ " videoRotationAngle:" + webCamTexture.videoRotationAngle + " videoVerticallyMirrored:" + webCamTexture.videoVerticallyMirrored + " isFrongFacing:" + webCamDevice.isFrontFacing);
if (colors == null || colors.Length != webCamTexture.width * webCamTexture.height)

View File

@ -70,13 +70,13 @@ namespace OpenCVCompact
if (!mat.isContinuous())
{
Debug.LogError("mat.isContinuous() must be true.");
LogPrint.Error("mat.isContinuous() must be true.");
return;
}
//if (texture2D.mipmapCount != 1)
//{
// Debug.LogError("texture2D.mipmapCount must be 1.");
// LogPrint.Error("texture2D.mipmapCount must be 1.");
// return;
//}
@ -240,8 +240,8 @@ namespace OpenCVCompact
if (request.isError)
{
#endif
Debug.LogWarning(request.error);
Debug.LogWarning(request.responseCode);
LogPrint.Warning(request.error);
LogPrint.Warning(request.responseCode);
return String.Empty;
}
@ -259,7 +259,7 @@ namespace OpenCVCompact
if (!string.IsNullOrEmpty(request.error))
{
Debug.LogWarning(request.error);
LogPrint.Warning(request.error);
return String.Empty;
}
@ -401,8 +401,8 @@ namespace OpenCVCompact
if (request.isError)
{
#endif
Debug.LogWarning(request.error);
Debug.LogWarning(request.responseCode);
LogPrint.Warning(request.error);
LogPrint.Warning(request.responseCode);
if (errorOccurred != null)
{
@ -440,7 +440,7 @@ namespace OpenCVCompact
if (!string.IsNullOrEmpty(request.error))
{
Debug.LogWarning(request.error);
LogPrint.Warning(request.error);
if (errorOccurred != null)
{

View File

@ -11,7 +11,7 @@ public class ClearingSettlementUI : UIPopupBase
if (pageData == null || pageData.Length < 2)
{
Debug.LogError("ClearingSettlementUI page parameter is invalid");
LogPrint.Error("ClearingSettlementUI page parameter is invalid");
return;
}

View File

@ -53,7 +53,7 @@ public class CVEstimator : Singleton<CVEstimator>
{
if (!IsInited)
{
Debug.LogError("CVEstimator is not inited. ");
LogPrint.Error("CVEstimator is not inited. ");
return;
}
_estimateThread.Start();
@ -93,15 +93,15 @@ public class CVEstimator : Singleton<CVEstimator>
Mat rgbaMat = YogaManager.Instance.RgbaMat;
if (rgbaMat == null)
{
//Debug.Log("WebCamTexture is null. ");
//Debug.Log("Re-Estimation.");
LogPrint.Log("WebCamTexture is null. ");
LogPrint.Log("Re-Estimation.");
continue; //重新检测
}
Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);
if (!YogaManager.Instance.CurrentEstimator.Esitmate(bgrMat, rgbaMat, out List<Point> points))
{
Debug.LogWarning("Pose estimation failed. Re-Estimating.");
LogPrint.Warning("Pose estimation failed. Re-Estimating.");
continue; //重新检测
}
@ -109,14 +109,14 @@ public class CVEstimator : Singleton<CVEstimator>
YogaManager.Instance.CurrPersonPoints = points;
//等待到下一个更新周期
if (DateTime.Now - startTime > _refreshRate)
{
startTime = DateTime.Now;
}
else
{
Thread.Sleep(_refreshRate - (DateTime.Now - startTime));
}
//if (DateTime.Now - startTime > _refreshRate)
//{
// startTime = DateTime.Now;
//}
//else
//{
// Thread.Sleep(_refreshRate - (DateTime.Now - startTime));
//}
}
bgrMat.Dispose();
}

View File

@ -19,7 +19,7 @@ public class MediaPipeEstimator : Estimator
var personDetectModelPath = Utils.getFilePath(YogaConfig.MODEL_PATHS[ModelType.MediapipePersonDetect]);
if (string.IsNullOrEmpty(personDetectModelPath))
{
Debug.LogError(YogaConfig.MODEL_PATHS[ModelType.MediapipePersonDetect] + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
LogPrint.Error(YogaConfig.MODEL_PATHS[ModelType.MediapipePersonDetect] + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
}
else
{
@ -29,7 +29,7 @@ public class MediaPipeEstimator : Estimator
var poseModelPath = Utils.getFilePath(YogaConfig.MODEL_PATHS[ModelType.MediapipePose]);
if (string.IsNullOrEmpty(poseModelPath))
{
Debug.LogError(YogaConfig.MODEL_PATHS[ModelType.MediapipePose] + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
LogPrint.Error(YogaConfig.MODEL_PATHS[ModelType.MediapipePose] + " is not loaded. Please read “StreamingAssets/OpenCVForUnity/dnn/setup_dnn_module.pdf” to make the necessary setup.");
}
else
{

View File

@ -33,7 +33,7 @@ public class OpenPoseEsimater : Estimator
var modelFilePath = Utils.getFilePath(YogaConfig.MODEL_PATHS[ModelType.OpenPose]);
if (string.IsNullOrEmpty(modelFilePath))
{
Debug.LogError("modelFilePath is empty. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
LogPrint.Error("modelFilePath is empty. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
return;
}
@ -52,7 +52,7 @@ public class OpenPoseEsimater : Estimator
points = null;
if (_objectDetector == null)
{
Debug.LogWarning("ObjectDetector is not ready. ");
LogPrint.Warning("ObjectDetector is not ready. ");
return false; //重新检测
}
@ -103,7 +103,7 @@ public class OpenPoseEsimater : Estimator
if (!hasValidObject) //没有检测到人体
{
Debug.Log("No person block found. Re-Estimation.");
LogPrint.Log("No person block found. Re-Estimation.");
return false; //重新检测
}
@ -141,7 +141,7 @@ public class OpenPoseEsimater : Estimator
if (!YogaManager.Instance.ActionCheckPoints(points))
{
Debug.Log("ActionCheckPoints failed. Re-Estimation.");
LogPrint.Log("ActionCheckPoints failed. Re-Estimation.");
return false; //重新检测
}
return true;

View File

@ -25,7 +25,7 @@ public class HandsUp : PoseBase
var angleR = Vector3.Angle(rArmVector, rArmBaseVector);
var angleL = Vector3.Angle(lArmVector, lArmBaseVector);
Debug.LogWarning("angleR:" + angleR + " angleL:" + angleL);
LogPrint.Warning("angleR:" + angleR + " angleL:" + angleL);
if (angleR < 10 || angleL < 10)
return false;

View File

@ -2,7 +2,7 @@
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using Serenegiant.UVC;
using System;
using System.Collections;
using System.Collections.Generic;
@ -10,24 +10,31 @@ using System.Linq;
using UnityEngine;
using UnityEngine.UI;
#if UNITY_ANDROID && !UNITY_EDITOR
using Serenegiant.UVC;
#endif
namespace Yoga
{
public class MotionCaptureManager : CameraCaptureManagerBase, IUVCDrawer
public class MotionCaptureManager : CameraCaptureManagerBase
#if UNITY_ANDROID && !UNITY_EDITOR
, IUVCDrawer
#endif
{
public WebCamTextureToMatHelper _webCamTextureToMatHelper;
private Texture2D texture;
public float SamplingRate = 0.3f;
public RawImage TargetDisplayer;
private Mat _bgrMat;
private bool _isOnCamCapture = false;
private Mat _bgrMat;
private Texture2D _displayTexture;
private bool _isCorrectAction = false;
private Material _flipImg;
public bool IsOnCamCapture { get => _isOnCamCapture; internal set => _isOnCamCapture = value; }
public WebCamTextureToMatHelper WebCamTextureToMatHelper => _webCamTextureToMatHelper;
private bool _isCorrectAction = false;
private void OnEnable()
{
EventManager.Instance.AddEventListener(YogaEventType.StartMotionCapture, OnStartMotionCapture);
@ -38,7 +45,6 @@ namespace Yoga
EventManager.Instance.AddEventListener(YogaEventType.ChangeCaptureCameraDevice, ChangeCaptureCameraDevice);
}
private void OnDisable()
{
EventManager.Instance.RemoveEventListener(YogaEventType.StartMotionCapture, OnStartMotionCapture);
@ -58,17 +64,19 @@ namespace Yoga
// Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
_webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
//tool loading
_flipImg = Resources.Load<Material>("Materials/Unlit_FlipHorizontal");
_webCamTextureToMatHelper.Initialize();
Utils.setDebugMode(true); //打印日志
GlobalData.Instance.CameraDeviceType = CameraDeviceType.USB; //test
#if UNITY_ANDROID && !UNITY_EDITOR
var devices = UVCManager.Instance.GetAttachedDevices();
#endif
CVEstimator.Instance.Init();//初始化姿态检测
}
#if UNITY_ANDROID && !UNITY_EDITOR
#region UVC
public UVCFilter[] UVCFilters;
private Texture SavedTexture;
@ -83,7 +91,7 @@ namespace Yoga
public void OnUVCDetachEvent(UVCManager manager, UVCDevice device)
{
Debug.Log("OnUVCDetachEvent " + device);
LogPrint.Log("OnUVCDetachEvent " + device);
}
public bool CanDraw(UVCManager manager, UVCDevice device)
@ -104,7 +112,7 @@ namespace Yoga
private void HandleOnStartPreview(Texture tex)
{
SavedTexture = TargetDisplayer.texture;
TargetDisplayer.texture = texture;
TargetDisplayer.texture = _displayTexture;
}
private void HandleOnStopPreview()
{
@ -124,42 +132,44 @@ namespace Yoga
SavedTexture = null;
}
#endregion
#endif
public static Texture2D HorizontalFlipTexture(Texture2D texture)
public Texture2D HorizontalFlipTexture(Texture2D texture)
{
//得到图片的宽高
int width = texture.width;
int height = texture.height;
Texture2D flipTexture = new Texture2D(width, height);
for (int i = 0; i < width; i++)
{
flipTexture.SetPixels(i, 0, 1, height, texture.GetPixels(width - i - 1, 0, 1, height));
}
flipTexture.Apply();
return flipTexture;
Texture2D retVal = new Texture2D(texture.width, texture.height);
RenderTexture tmp = RenderTexture.GetTemporary(texture.width, texture.height, 0, RenderTextureFormat.ARGB32);
Graphics.Blit(texture, tmp, _flipImg);
RenderTexture.active = tmp;
retVal.ReadPixels(new UnityEngine.Rect(0, 0, texture.width, texture.height), 0, 0);
RenderTexture.ReleaseTemporary(tmp);
return retVal;
}
private Mat GetMat()
{
Mat img = null;
#if UNITY_ANDROID && !UNITY_EDITOR
if (GlobalData.Instance.CameraDeviceType == CameraDeviceType.USB &&
UVCManager.Instance != null &&
UVCManager.Instance.GetAttachedDevices() != null && UVCManager.Instance.GetAttachedDevices().Count > 0)
{
var devices = UVCManager.Instance.GetAttachedDevices();
for (int i = 0; i < devices.Count; i++)
{
UVCManager.CameraInfo device = devices[i];
LogPrint.Error($"id:{i},pid:{device.Pid},vid:{device.Vid},deviceName:{device.DeviceName}");
}
var formatRefMat = _webCamTextureToMatHelper.GetMat();
var picCaptured = devices.FirstOrDefault().previewTexture;
Texture2D tmpTex = new Texture2D(picCaptured.width, picCaptured.height);
var picCaptured = devices.FirstOrDefault().previewTexture;
Texture2D tmpTex = new Texture2D(picCaptured.width, picCaptured.height);
Utils.textureToTexture2D(picCaptured, tmpTex);
tmpTex = HorizontalFlipTexture(tmpTex);//picCaptured texture水平反转
img = new Mat(formatRefMat.rows(), formatRefMat.cols(), formatRefMat.type());
Utils.texture2DToMat(tmpTex, img);
}
else
#endif
{
if (_webCamTextureToMatHelper.IsPlaying() && _webCamTextureToMatHelper.DidUpdateThisFrame())
{
@ -217,7 +227,7 @@ namespace Yoga
}
}
Utils.matToTexture2D(img, texture);
Utils.matToTexture2D(img, _displayTexture);
}
#region Event Func
@ -247,7 +257,7 @@ namespace Yoga
if (startTime.AddMilliseconds(100) < DateTime.Now)
{
Debug.LogError("GetActionBasePoint timeout");
LogPrint.Error("GetActionBasePoint timeout");
break;
}
}
@ -258,7 +268,7 @@ namespace Yoga
var type = args.FirstOrDefault();
if (type == null)
{
Debug.LogError("EstimateAction type is null");
LogPrint.Error("EstimateAction type is null");
return;
}
AvatarAction actionType = (AvatarAction)args.FirstOrDefault();
@ -276,8 +286,8 @@ namespace Yoga
}
if (startTime.AddMilliseconds(100) < DateTime.Now)
{
Debug.LogWarning("请摆正姿势");
Debug.LogWarning("EstimateAction timeout");
LogPrint.Warning("请摆正姿势");
LogPrint.Warning("EstimateAction timeout");
break;
}
}
@ -322,18 +332,18 @@ namespace Yoga
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
LogPrint.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = _webCamTextureToMatHelper.GetMat();
texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGB24, false);
Utils.matToTexture2D(webCamTextureMat, texture);
_displayTexture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGB24, false);
Utils.matToTexture2D(webCamTextureMat, _displayTexture);
//gameObject.GetComponent<Renderer>().material.mainTexture = texture;
TargetDisplayer.texture = texture;
TargetDisplayer.texture = _displayTexture;
gameObject.transform.localScale = new Vector3(webCamTextureMat.cols() / 10, webCamTextureMat.rows() / 10, 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
LogPrint.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
@ -355,21 +365,21 @@ namespace Yoga
//event call
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
LogPrint.Log("OnWebCamTextureToMatHelperDisposed");
if (_bgrMat != null)
_bgrMat.Dispose();
if (texture != null)
if (_displayTexture != null)
{
Texture2D.Destroy(texture);
texture = null;
Texture2D.Destroy(_displayTexture);
_displayTexture = null;
}
}
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
LogPrint.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
public void ScoreUpdate()

View File

@ -107,7 +107,7 @@ namespace Yoga
// cheack
if (image.channels() != 3)
{
Debug.Log("The input image must be in BGR format.");
LogPrint.Log("The input image must be in BGR format.");
return new Mat();
}
@ -229,7 +229,7 @@ namespace Yoga
sb.AppendLine(String.Format("box: {0:0} {1:0} {2:0} {3:0}", box[0], box[1], box[2], box[3]));
}
Debug.Log(sb);
LogPrint.Log(sb);
}
}
@ -272,7 +272,7 @@ namespace Yoga
}
catch (System.Exception ex)
{
Debug.LogError(ex.Message);
LogPrint.Error(ex.Message);
return null;
}
finally

View File

@ -11,7 +11,7 @@ public partial class SROptions
var devices = WebCamTexture.devices;
if (devices.Length == 0)
{
Debug.LogError("No camera detected!");
LogPrint.Error("No camera detected!");
return;
}
GlobalData.Instance.CameraIndex++;
@ -29,14 +29,14 @@ public partial class SROptions
var devices = WebCamTexture.devices;
if (devices.Length == 0)
{
Debug.LogError("No camera detected!");
LogPrint.Error("No camera detected!");
return;
}
Debug.LogWarning($"Current camera: {devices[GlobalData.Instance.CameraIndex].name}");
LogPrint.Warning($"Current camera: {devices[GlobalData.Instance.CameraIndex].name}");
foreach (var device in devices)
{
Debug.LogWarning($"Camera: {device.name}");
LogPrint.Warning($"Camera: {device.name}");
}
}

View File

@ -0,0 +1,63 @@
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Level-filtered logging wrapper around <see cref="Debug"/>.
/// A message is printed only when its level is greater than or equal to the
/// global threshold <see cref="Level"/>.
/// </summary>
public class LogPrint
{
    /// <summary>
    /// Global threshold: messages whose level compares below this value are suppressed.
    /// Defaults to <see cref="PrintLevel.Normal"/>.
    /// </summary>
    public static PrintLevel Level = PrintLevel.Normal;

    // NOTE: with the original defaults every method defaulted to
    // PrintLevel.Details (0) while Level starts at PrintLevel.Normal (10),
    // so the gate (messageLevel >= Level) suppressed EVERYTHING by default,
    // including errors. Warning/Error/Exception now default to levels at or
    // above Normal so they are visible out of the box; plain Log keeps the
    // Details default so verbose output stays opt-in.

    /// <summary>Prints an informational message if <paramref name="logLevel"/> passes the threshold.</summary>
    public static void Log(object message, PrintLevel logLevel = PrintLevel.Details)
    {
        if (logLevel >= Level)
        {
            Debug.Log(message);
        }
    }

    /// <summary>Prints an informational message with a context object (click-to-ping in the editor).</summary>
    public static void Log(object message, UnityEngine.Object sender, PrintLevel logLevel = PrintLevel.Details)
    {
        if (logLevel >= Level)
        {
            Debug.Log(message, sender);
        }
    }

    /// <summary>Prints a warning; defaults to <see cref="PrintLevel.Normal"/> so it shows with the default threshold.</summary>
    public static void Warning(object message, PrintLevel level = PrintLevel.Normal)
    {
        if (level >= Level)
        {
            Debug.LogWarning(message);
        }
    }

    /// <summary>Prints a warning with a context object; defaults to <see cref="PrintLevel.Normal"/>.</summary>
    public static void Warning(object message, UnityEngine.Object sender, PrintLevel logLevel = PrintLevel.Normal)
    {
        if (logLevel >= Level)
        {
            Debug.LogWarning(message, sender);
        }
    }

    /// <summary>Prints an error; defaults to <see cref="PrintLevel.Important"/> so errors are never silently dropped at default settings.</summary>
    public static void Error(object message, PrintLevel level = PrintLevel.Important)
    {
        if (level >= Level)
        {
            Debug.LogError(message);
        }
    }

    /// <summary>Prints an exception with stack trace; defaults to <see cref="PrintLevel.Important"/>.</summary>
    public static void Exception(Exception ex, PrintLevel level = PrintLevel.Important)
    {
        if (level >= Level)
        {
            Debug.LogException(ex);
        }
    }
}

/// <summary>
/// Message severity / verbosity. Numeric gaps leave room to insert
/// intermediate levels later without renumbering existing callers.
/// </summary>
public enum PrintLevel
{
    Details = 0,
    Normal = 10,
    Important = 100,
    Critical = 200,
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4e022dee4b9ec8b4facc2144669a1cf8
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -129,8 +129,8 @@ public class FaceDetectManager : MonoBehaviour
initFaceLndmrkRes = dnnUtils.InitFaceLandmarkDetect(dnnLndmrkDetectModelFilePath);
if (initFaceLndmrkRes == false)
{
Debug.LogError(dnnLndmrkDetectModelFilePath + " file is not loaded.");
Debug.LogError(dnnLndmrkDetectModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
LogPrint.Error(dnnLndmrkDetectModelFilePath + " file is not loaded.");
LogPrint.Error(dnnLndmrkDetectModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
}
dnnUtils.InitHeadPoseEstimation();
@ -138,34 +138,34 @@ public class FaceDetectManager : MonoBehaviour
int initFaceAttrRes = dnnUtils.InitFaceAttribNet_7(dnnFaceAttr7ModelFilePath);
if (initFaceAttrRes <= 0)
{
Debug.LogError(dnnFaceAttr7ModelFilePath + " file is not loaded.");
Debug.LogError(dnnFaceAttr7ModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
LogPrint.Error(dnnFaceAttr7ModelFilePath + " file is not loaded.");
LogPrint.Error(dnnFaceAttr7ModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
}
int initFaceExpRes = dnnUtils.InitFaceExpressionNet_7(dnnFaceExpModelFilePath);
if (initFaceExpRes <= 0)
{
Debug.LogError(dnnFaceExpModelFilePath + " file is not loaded.");
Debug.LogError(dnnFaceExpModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
LogPrint.Error(dnnFaceExpModelFilePath + " file is not loaded.");
LogPrint.Error(dnnFaceExpModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
}
int initMultiTinyFaceDetectRes = dnnUtils.InitMultiTinyFaceDetector(multiTinyFaceDetectModelFilePath);
if (initMultiTinyFaceDetectRes <= 0)
{
Debug.LogError(multiTinyFaceDetectModelFilePath + " file is not loaded.");
Debug.LogError(multiTinyFaceDetectModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
LogPrint.Error(multiTinyFaceDetectModelFilePath + " file is not loaded.");
LogPrint.Error(multiTinyFaceDetectModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
}
int initYOLOV3DetectRes = dnnUtils.InitYOLOV3Detector(YOLOV3DetectModelFilePath, YOLOV3DetectParamFilePath);
if (initYOLOV3DetectRes == -1)
{
Debug.LogError(YOLOV3DetectModelFilePath + " file is not loaded.");
Debug.LogError(YOLOV3DetectModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
LogPrint.Error(YOLOV3DetectModelFilePath + " file is not loaded.");
LogPrint.Error(YOLOV3DetectModelFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
}
else if (initYOLOV3DetectRes == -2)
{
Debug.LogError(YOLOV3DetectParamFilePath + " file is not loaded.");
Debug.LogError(YOLOV3DetectParamFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
LogPrint.Error(YOLOV3DetectParamFilePath + " file is not loaded.");
LogPrint.Error(YOLOV3DetectParamFilePath + " file is not existed on StreamingAssets Folder. Please copy from “Assets/FaceAnalyzer/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
}
dnnUtils.InitLabels_7();
@ -242,7 +242,7 @@ public class FaceDetectManager : MonoBehaviour
idx = idx + 1;
}
Debug.LogWarning(text.Substring(0, text.Length - 1));
LogPrint.Warning(text.Substring(0, text.Length - 1));
}
prob.Dispose();
@ -260,7 +260,7 @@ public class FaceDetectManager : MonoBehaviour
mat4Display = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC4);
mat4DisplayTexture = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC4);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
LogPrint.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
@ -279,18 +279,18 @@ public class FaceDetectManager : MonoBehaviour
dnnUtils.InitHeadPoseEstimationCameraInfo(webCamTextureMat.cols(), webCamTextureMat.rows());
webCamReady = true;
Debug.Log("OnWebCamTextureToMatHelperInitialized");
LogPrint.Log("OnWebCamTextureToMatHelperInitialized");
}
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
LogPrint.Log("OnWebCamTextureToMatHelperDisposed");
}
public void OnWebCamTextureToMatHelperErrorOccurred(OpenCVCompact.WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
LogPrint.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
}

View File

@ -28,7 +28,7 @@ public class GuideUI : UIPanelBase
_guide = GameObject.FindWithTag("Guide");
if (_guide == null)
{
Debug.LogError("Guide is null");
LogPrint.Error("Guide is null");
}
_effectAnimator = transform.Find("Content").GetComponentInChildren<Animator>();
@ -63,7 +63,7 @@ public class GuideUI : UIPanelBase
var animeType = args.FirstOrDefault();
if (animeType == null)
{
Debug.LogError("PlayAnimation animeType is null");
LogPrint.Error("PlayAnimation animeType is null");
return;
}
@ -75,7 +75,7 @@ public class GuideUI : UIPanelBase
//args[0] = successCount args[1] = excutedCount args[2] = totalCount
if (args.Length < 3)
{
Debug.LogError("UpdateSuccessCount args is not enough");
LogPrint.Error("UpdateSuccessCount args is not enough");
return;
}
@ -113,7 +113,7 @@ public class GuideUI : UIPanelBase
if (pageData[0] == null)
{
Debug.LogError("Guide is null");
LogPrint.Error("Guide is null");
return;
}
@ -121,7 +121,7 @@ public class GuideUI : UIPanelBase
_totalSeconds = data.TotalSeconds;
if (_totalSeconds == 0)
{
Debug.LogError("TotalSeconds is 0");
LogPrint.Error("TotalSeconds is 0");
return;
}

View File

@ -66,7 +66,7 @@ public static class YogaConfig
{
if (!BODY_PARTS.ContainsKey(tagName))
{
Debug.LogError("TagName is not exist");
LogPrint.Error("TagName is not exist");
return -1;
}
return BODY_PARTS[tagName];
@ -76,7 +76,7 @@ public static class YogaConfig
{
if (index < 0 || index >= BODY_PARTS.Count)
{
Debug.LogError("Index is not exist");
LogPrint.Error("Index is not exist");
return "";
}
return BODY_PARTS.FirstOrDefault(x => x.Value == index).Key;
@ -86,7 +86,7 @@ public static class YogaConfig
{
if (!BODY_PARTS.ContainsKey(tagName))
{
Debug.LogError("TagName is not exist");
LogPrint.Error("TagName is not exist");
return new Point(-1, -1);
}
@ -97,7 +97,7 @@ public static class YogaConfig
{
if (!BODY_PARTS.ContainsKey(tagName))
{
Debug.LogError("TagName is not exist");
LogPrint.Error("TagName is not exist");
return new Point(-1, -1);
}
@ -111,7 +111,7 @@ public static class YogaConfig
{
if (!BODY_PARTS.ContainsKey(tagName))
{
Debug.LogError("TagName is not exist");
LogPrint.Error("TagName is not exist");
return new Vector2(-1, -1);
}
@ -131,7 +131,7 @@ public static class YogaConfig
var point = points[BODY_PARTS[partName]];
if (point == new Point(-1, -1))
{
Debug.Log($"{partName}不在画面中");
LogPrint.Log($"{partName}不在画面中");
return false;
}

View File

@ -54,7 +54,7 @@ public class YogaDataLoader
return data[index];
else
{
Debug.LogError("YogaDataLoader.LoadData: index out of range");
LogPrint.Error("YogaDataLoader.LoadData: index out of range");
}
return null;

View File

@ -120,7 +120,7 @@ public class YogaManager : MonoSingleton<YogaManager>
{
if (!_actions.ContainsKey(actionType))
{
Debug.LogError("ActionType is not exist");
LogPrint.Error("ActionType is not exist");
return false;
}
var result = _actions[actionType].CheckPose(personPoints);
@ -131,7 +131,7 @@ public class YogaManager : MonoSingleton<YogaManager>
{
if (points == null || points.Count == 0)
{
//Debug.Log("ActionCheckPoints points is null");
//LogPrint.Log("ActionCheckPoints points is null");
return false;
}
@ -139,14 +139,14 @@ public class YogaManager : MonoSingleton<YogaManager>
//{
// Point p = points[i];
// if (p != new Point(-1, -1))
// Debug.LogWarning($"ActionPoints p({i}): {i.tagName()}, value: {p.x},{p.y}");
// LogPrint.Warning($"ActionPoints p({i}): {i.tagName()}, value: {p.x},{p.y}");
//}
foreach (var p in Action.MustPoints)
{
if (!p.IsValid(points)) //有一个点不符合 返回false
{
//Debug.LogError($"ActionCheckPoints failed, {p} is not valid");
//LogPrint.Error($"ActionCheckPoints failed, {p} is not valid");
return false;
}
}

View File

@ -68,7 +68,7 @@ public class CubeHandler : MonoBehaviour,
public void OnPointerClick(PointerEventData eventData)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log("OnClick:");
LogPrint.Log("OnClick:");
#endif
}
}

View File

@ -82,7 +82,7 @@ namespace Serenegiant {
{ // タッチしたとき
touchEvent.state = TouchState.Began;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log("タッチした:");
LogPrint.Log("タッチした:");
#endif
}
@ -90,7 +90,7 @@ namespace Serenegiant {
{ // 離したとき
touchEvent.state = TouchState.Ended;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log("タッチした:");
LogPrint.Log("タッチした:");
#endif
}
@ -98,7 +98,7 @@ namespace Serenegiant {
{ // 押し続けているとき
touchEvent.state = TouchState.Moved;
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log("押し続けている");
LogPrint.Log("押し続けている");
#endif
}

View File

@ -61,7 +61,7 @@ namespace Serenegiant
void Awake()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}Awake:");
LogPrint.Log($"{TAG}Awake:");
#endif
#if UNITY_ANDROID
Input.backButtonLeavesApp = true; // 端末のバックキーでアプリを終了できるようにする
@ -78,7 +78,7 @@ namespace Serenegiant
public void OnStartEvent()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnStartEvent:");
LogPrint.Log($"{TAG}OnStartEvent:");
#endif
}
@ -88,7 +88,7 @@ namespace Serenegiant
public void OnResumeEvent()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnResumeEvent:");
LogPrint.Log($"{TAG}OnResumeEvent:");
#endif
LifecycleEvent?.Invoke(true);
}
@ -99,7 +99,7 @@ namespace Serenegiant
public void OnPauseEvent()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnPauseEvent:");
LogPrint.Log($"{TAG}OnPauseEvent:");
#endif
LifecycleEvent?.Invoke(false);
}
@ -110,7 +110,7 @@ namespace Serenegiant
public void OnStopEvent()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnStopEvent:");
LogPrint.Log($"{TAG}OnStopEvent:");
#endif
}
@ -120,7 +120,7 @@ namespace Serenegiant
public void OnPermissionGrant()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnPermissionGrant:");
LogPrint.Log($"{TAG}OnPermissionGrant:");
#endif
grantResult = PermissionGrantResult.PERMISSION_GRANT;
isPermissionRequesting = false;
@ -132,7 +132,7 @@ namespace Serenegiant
public void OnPermissionDeny()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnPermissionDeny:");
LogPrint.Log($"{TAG}OnPermissionDeny:");
#endif
grantResult = PermissionGrantResult.PERMISSION_DENY;
isPermissionRequesting = false;
@ -144,7 +144,7 @@ namespace Serenegiant
public void OnPermissionDenyAndNeverAskAgain()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnPermissionDenyAndNeverAskAgain:");
LogPrint.Log($"{TAG}OnPermissionDenyAndNeverAskAgain:");
#endif
grantResult = PermissionGrantResult.PERMISSION_DENY_AND_NEVER_ASK_AGAIN;
isPermissionRequesting = false;
@ -158,7 +158,7 @@ namespace Serenegiant
private void Initialize()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}Initialize:{gameObject.name}");
LogPrint.Log($"{TAG}Initialize:{gameObject.name}");
#endif
using (AndroidJavaClass clazz = new AndroidJavaClass(FQCN_PLUGIN))
{
@ -204,7 +204,7 @@ namespace Serenegiant
public static IEnumerator RequestPermission(string permission, OnPermission callback)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}GrantPermission:{permission}");
LogPrint.Log($"{TAG}GrantPermission:{permission}");
#endif
if (!HasPermission(permission))
{
@ -245,7 +245,7 @@ namespace Serenegiant
public static IEnumerator GrantPermission(string permission, OnPermission callback)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}GrantPermission:{permission}");
LogPrint.Log($"{TAG}GrantPermission:{permission}");
#endif
if (!HasPermission(permission))
{
@ -284,7 +284,7 @@ namespace Serenegiant
public static IEnumerator GrantCameraPermission(OnPermission callback)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}GrantCameraPermission:");
LogPrint.Log($"{TAG}GrantCameraPermission:");
#endif
if (CheckAndroidVersion(23))
{

View File

@ -67,7 +67,7 @@ namespace Serenegiant.UVC
void Start()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}Start:");
LogPrint.Log($"{TAG}Start:");
#endif
UpdateTarget();
@ -91,7 +91,7 @@ namespace Serenegiant.UVC
public bool OnUVCAttachEvent(UVCManager manager, UVCDevice device)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnUVCAttachEvent:{device}");
LogPrint.Log($"{TAG}OnUVCAttachEvent:{device}");
#endif
// XXX 今の実装では基本的に全てのUVC機器を受け入れる
// ただしTHETA SとTHETA VとTHETA Z1は映像を取得できないインターフェースがあるのでオミットする
@ -112,7 +112,7 @@ namespace Serenegiant.UVC
public void OnUVCDetachEvent(UVCManager manager, UVCDevice device)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnUVCDetachEvent:{device}");
LogPrint.Log($"{TAG}OnUVCDetachEvent:{device}");
#endif
}
@ -126,26 +126,26 @@ namespace Serenegiant.UVC
// public SupportedFormats.Size OnUVCSelectSize(UVCManager manager, UVCDevice device, SupportedFormats formats)
// {
//#if (!NDEBUG && DEBUG && ENABLE_LOG)
// Debug.Log($"{TAG}OnUVCSelectSize:{device}");
// LogPrint.Log($"{TAG}OnUVCSelectSize:{device}");
//#endif
// if (device.IsTHETA_V || device.IsTHETA_Z1)
// {
//#if (!NDEBUG && DEBUG && ENABLE_LOG)
// Debug.Log($"{TAG}OnUVCSelectSize:THETA V/Z1");
// LogPrint.Log($"{TAG}OnUVCSelectSize:THETA V/Z1");
//#endif
// return FindSize(formats, 3840, 1920);
// }
// else if (device.IsTHETA_S)
// {
//#if (!NDEBUG && DEBUG && ENABLE_LOG)
// Debug.Log($"{TAG}OnUVCSelectSize:THETA S");
// LogPrint.Log($"{TAG}OnUVCSelectSize:THETA S");
//#endif
// return FindSize(formats, 1920, 1080);
// }
// else
// {
//#if (!NDEBUG && DEBUG && ENABLE_LOG)
// Debug.Log($"{TAG}OnUVCSelectSize:other UVC device,{device}");
// LogPrint.Log($"{TAG}OnUVCSelectSize:other UVC device,{device}");
//#endif
// return formats.Find(DefaultWidth, DefaultHeight);
// }
@ -172,7 +172,7 @@ namespace Serenegiant.UVC
public void OnUVCStartEvent(UVCManager manager, UVCDevice device, Texture tex)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnUVCStartEvent:{device}");
LogPrint.Log($"{TAG}OnUVCStartEvent:{device}");
#endif
HandleOnStartPreview(tex);
}
@ -186,7 +186,7 @@ namespace Serenegiant.UVC
public void OnUVCStopEvent(UVCManager manager, UVCDevice device)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnUVCStopEvent:{device}");
LogPrint.Log($"{TAG}OnUVCStopEvent:{device}");
#endif
HandleOnStopPreview();
}
@ -214,7 +214,7 @@ namespace Serenegiant.UVC
found = true;
}
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}UpdateTarget:material={material}");
LogPrint.Log($"{TAG}UpdateTarget:material={material}");
#endif
}
i++;
@ -313,7 +313,7 @@ namespace Serenegiant.UVC
}
catch
{
Debug.Log($"{TAG}RestoreTexture:Exception cought");
LogPrint.Log($"{TAG}RestoreTexture:Exception cought");
}
SavedTextures[i] = null;
quaternions[i] = Quaternion.identity;
@ -335,7 +335,7 @@ namespace Serenegiant.UVC
private void HandleOnStartPreview(Texture tex)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}HandleOnStartPreview:({tex})");
LogPrint.Log($"{TAG}HandleOnStartPreview:({tex})");
#endif
int i = 0;
foreach (var target in TargetMaterials)
@ -343,7 +343,7 @@ namespace Serenegiant.UVC
if (target is Material)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}HandleOnStartPreview:assign Texture to Material({target})");
LogPrint.Log($"{TAG}HandleOnStartPreview:assign Texture to Material({target})");
#endif
SavedTextures[i++] = (target as Material).mainTexture;
(target as Material).mainTexture = tex;
@ -351,7 +351,7 @@ namespace Serenegiant.UVC
else if (target is RawImage)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}HandleOnStartPreview:assign Texture to RawImage({target})");
LogPrint.Log($"{TAG}HandleOnStartPreview:assign Texture to RawImage({target})");
#endif
SavedTextures[i++] = (target as RawImage).texture;
(target as RawImage).texture = tex;
@ -365,12 +365,12 @@ namespace Serenegiant.UVC
private void HandleOnStopPreview()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}HandleOnStopPreview:");
LogPrint.Log($"{TAG}HandleOnStopPreview:");
#endif
// 描画先のテクスチャをもとに戻す
RestoreTexture();
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}HandleOnStopPreview:finished");
LogPrint.Log($"{TAG}HandleOnStopPreview:finished");
#endif
}

View File

@ -122,7 +122,7 @@ namespace Serenegiant.UVC
}
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}Match({device}):result={result}");
LogPrint.Log($"{TAG}Match({device}):result={result}");
#endif
return result;
}

View File

@ -251,7 +251,7 @@ namespace Serenegiant.UVC
{
DontDestroyOnLoad(gameObject);
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}Start:");
LogPrint.Log($"{TAG}Start:");
#endif
mainContext = SynchronizationContext.Current;
callback = OnDeviceChangedCallbackManager.Add(this);
@ -262,28 +262,28 @@ namespace Serenegiant.UVC
#if (!NDEBUG && DEBUG && ENABLE_LOG)
void OnApplicationFocus()
{
Debug.Log($"{TAG}OnApplicationFocus:");
LogPrint.Log($"{TAG}OnApplicationFocus:");
}
#endif
#if (!NDEBUG && DEBUG && ENABLE_LOG)
void OnApplicationPause(bool pauseStatus)
{
Debug.Log($"{TAG}OnApplicationPause:{pauseStatus}");
LogPrint.Log($"{TAG}OnApplicationPause:{pauseStatus}");
}
#endif
#if (!NDEBUG && DEBUG && ENABLE_LOG)
void OnApplicationQuits()
{
Debug.Log($"{TAG}OnApplicationQuits:");
LogPrint.Log($"{TAG}OnApplicationQuits:");
}
#endif
void OnDestroy()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnDestroy:");
LogPrint.Log($"{TAG}OnDestroy:");
#endif
StopAll();
OnDeviceChangedCallbackManager.Remove(this);
@ -296,13 +296,13 @@ namespace Serenegiant.UVC
{
var id = UVCDevice.GetId(devicePtr);
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnDeviceChangedInternal:id={id},attached={attached}");
LogPrint.Log($"{TAG}OnDeviceChangedInternal:id={id},attached={attached}");
#endif
if (attached)
{
UVCDevice device = new UVCDevice(devicePtr);
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnDeviceChangedInternal:device={device.ToString()}");
LogPrint.Log($"{TAG}OnDeviceChangedInternal:device={device.ToString()}");
#endif
if (HandleOnAttachEvent(device))
{
@ -407,7 +407,7 @@ namespace Serenegiant.UVC
// {
// var size = (drawer as IUVCDrawer).OnUVCSelectSize(this, info.device, supportedVideoSize);
//#if (!NDEBUG && DEBUG && ENABLE_LOG)
// Debug.Log($"{TAG}StartPreview:selected={size}");
// LogPrint.Log($"{TAG}StartPreview:selected={size}");
//#endif
// if (size != null)
// { // 一番最初に見つかった描画可能なIUVCDrawersがnull以外を返せばそれを使う
@ -421,7 +421,7 @@ namespace Serenegiant.UVC
// FIXME 対応解像度の確認処理
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}StartPreview:({width}x{height}),id={device.id}");
LogPrint.Log($"{TAG}StartPreview:({width}x{height}),id={device.id}");
#endif
int[] frameTypes = {
PreferH264 ? FRAME_TYPE_H264 : FRAME_TYPE_MJPEG,
@ -441,7 +441,7 @@ namespace Serenegiant.UVC
mainContext.Post(__ =>
{ // テクスチャの生成はメインスレッドで行わないといけない
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}映像受け取り用テクスチャ生成:({width}x{height})");
LogPrint.Log($"{TAG}映像受け取り用テクスチャ生成:({width}x{height})");
#endif
Texture2D tex = new Texture2D(
width, height,
@ -501,7 +501,7 @@ namespace Serenegiant.UVC
else
{
bool hasDrawer = false;
Debug.LogWarning("UVCDrawers: " + UVCDrawers.Length);
LogPrint.Warning("UVCDrawers: " + UVCDrawers.Length);
foreach (var drawer in UVCDrawers)
{
if (drawer is IUVCDrawer)
@ -525,7 +525,7 @@ namespace Serenegiant.UVC
/// <returns></returns>
bool FilterDevice(string deviceName)
{
Debug.Log(deviceName+" :"+ (deviceName == "/dev/bus/usb/002/008"));
LogPrint.Log(deviceName+" :"+ (deviceName == "/dev/bus/usb/002/008"));
return deviceName == "/dev/bus/usb/002/008";
}
@ -554,7 +554,7 @@ namespace Serenegiant.UVC
void HandleOnStartPreviewEvent(CameraInfo info)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}HandleOnStartPreviewEvent:({info})");
LogPrint.Log($"{TAG}HandleOnStartPreviewEvent:({info})");
#endif
if ((info != null) && info.IsPreviewing && (UVCDrawers != null))
{
@ -569,7 +569,7 @@ namespace Serenegiant.UVC
else
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}HandleOnStartPreviewEvent:No UVCDrawers");
LogPrint.Log($"{TAG}HandleOnStartPreviewEvent:No UVCDrawers");
#endif
}
}
@ -581,7 +581,7 @@ namespace Serenegiant.UVC
void HandleOnStopPreviewEvent(CameraInfo info)
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}HandleOnStopPreviewEvent:({info})");
LogPrint.Log($"{TAG}HandleOnStopPreviewEvent:({info})");
#endif
if (UVCDrawers != null)
{
@ -632,14 +632,14 @@ namespace Serenegiant.UVC
private IEnumerator Initialize()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}Initialize:");
LogPrint.Log($"{TAG}Initialize:");
#endif
if (AndroidUtils.CheckAndroidVersion(28))
{
yield return AndroidUtils.GrantCameraPermission((string permission, AndroidUtils.PermissionGrantResult result) =>
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}OnPermission:{permission}={result}");
LogPrint.Log($"{TAG}OnPermission:{permission}={result}");
#endif
switch (result)
{
@ -673,7 +673,7 @@ namespace Serenegiant.UVC
private void InitPlugin()
{
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}InitPlugin:");
LogPrint.Log($"{TAG}InitPlugin:");
#endif
// IUVCDrawersが割り当てられているかどうかをチェック
var hasDrawer = false;
@ -692,7 +692,7 @@ namespace Serenegiant.UVC
{ // インスペクタでIUVCDrawerが設定されていないときは
// このスクリプトがaddされているゲームオブジェクトからの取得を試みる
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}InitPlugin:has no IUVCDrawer, try to get from gameObject");
LogPrint.Log($"{TAG}InitPlugin:has no IUVCDrawer, try to get from gameObject");
#endif
var objs = FindObjectsByType<CameraCaptureManagerBase>(FindObjectsSortMode.None);
List<Component> drawers = new List<Component>();
@ -701,7 +701,7 @@ namespace Serenegiant.UVC
drawers.AddRange(obj.GetComponents(typeof(IUVCDrawer)));
}
Debug.LogWarning("drawers: " + drawers.Count);
LogPrint.Warning("drawers: " + drawers.Count);
if ((drawers != null) && (drawers.Count > 0))
{
UVCDrawers = new Component[drawers.Count];
@ -713,7 +713,7 @@ namespace Serenegiant.UVC
}
}
#if (!NDEBUG && DEBUG && ENABLE_LOG)
Debug.Log($"{TAG}InitPlugin:num drawers={UVCDrawers.Length}");
LogPrint.Log($"{TAG}InitPlugin:num drawers={UVCDrawers.Length}");
#endif
// aandusbのDeviceDetectorを読み込み要求
using (AndroidJavaClass clazz = new AndroidJavaClass(FQCN_DETECTOR))
@ -806,7 +806,7 @@ namespace Serenegiant.UVC
var manager = sManagers.ContainsKey(id) ? sManagers[id] : null;
if (manager != null)
{
Debug.Log($"OnDeviceChanged{id} attached:{attached}");
LogPrint.Log($"OnDeviceChanged{id} attached:{attached}");
manager.OnDeviceChanged(devicePtr, attached);
}
}