arskrald/AR-2/Assets/OpenCVForUnity/Examples/MainModules/objdetect/FaceDetectionExample/FaceDetectionWebCamTextureExample.cs

using UnityEngine;
using UnityEngine.SceneManagement;
using System;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
namespace OpenCVForUnityExample
{
/// <summary>
/// Face Detection WebCamTexture Example
/// An example of detecting human faces in a WebCamTexture image using the CascadeClassifier class.
/// http://docs.opencv.org/3.2.0/db/d28/tutorial_cascade_classifier.html
/// </summary>
[RequireComponent (typeof(WebCamTextureToMatHelper))]
public class FaceDetectionWebCamTextureExample : MonoBehaviour
{
/// <summary>
/// The grayscale Mat used for face detection.
/// </summary>
Mat grayMat;
/// <summary>
/// The output texture that displays the processed camera frames.
/// </summary>
Texture2D texture;
/// <summary>
/// The cascade classifier used for face detection.
/// </summary>
CascadeClassifier cascade;
/// <summary>
/// The detected face rectangles.
/// </summary>
MatOfRect faces;
/// <summary>
/// The webcam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The FPS monitor.
/// </summary>
FpsMonitor fpsMonitor;
#if UNITY_WEBGL && !UNITY_EDITOR
IEnumerator getFilePath_Coroutine;
#endif
// Use this for initialization
void Start ()
{
fpsMonitor = GetComponent<FpsMonitor> ();
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
#if UNITY_WEBGL && !UNITY_EDITOR
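// On WebGL, files under StreamingAssets cannot be read synchronously, so the cascade file path is resolved asynchronously before the classifier is loaded and the helper is initialized.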
getFilePath_Coroutine = Utils.getFilePathAsync ("lbpcascade_frontalface.xml", (result) => {
getFilePath_Coroutine = null;
cascade = new CascadeClassifier ();
cascade.load (result);
if (cascade.empty ()) {
Debug.LogError ("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
webCamTextureToMatHelper.Initialize ();
});
StartCoroutine (getFilePath_Coroutine);
#else
cascade = new CascadeClassifier ();
cascade.load (Utils.getFilePath ("lbpcascade_frontalface.xml"));
// cascade.load (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
#if !UNITY_WSA_10_0
if (cascade.empty ()) {
Debug.LogError ("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
#endif
#if UNITY_ANDROID && !UNITY_EDITOR
// Avoids the front camera low-light issue that occurs only on some Android devices (e.g. Google Pixel, Pixel 2).
webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
webCamTextureToMatHelper.Initialize ();
#endif
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized ()
{
Debug.Log ("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
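// Create an output texture matching the camera frame size, assign it to this quad's material, and scale the quad to the frame dimensions.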
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
if (fpsMonitor != null) {
fpsMonitor.Add ("width", webCamTextureMat.width ().ToString ());
fpsMonitor.Add ("height", webCamTextureMat.height ().ToString ());
fpsMonitor.Add ("orientation", Screen.orientation.ToString ());
}
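// Fit the quad to the screen by setting the orthographic camera size along whichever axis is the tighter fit.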
float width = webCamTextureMat.width ();
float height = webCamTextureMat.height ();
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale) {
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
} else {
Camera.main.orthographicSize = height / 2;
}
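// Allocate the per-frame working buffers: a grayscale Mat for detection and a MatOfRect for the results.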
grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
faces = new MatOfRect ();
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed ()
{
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
if (grayMat != null)
grayMat.Dispose ();
if (texture != null) {
Texture2D.Destroy (texture);
texture = null;
}
if (faces != null)
faces.Dispose ();
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred (WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
void Update ()
{
if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
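// Convert the RGBA frame to grayscale and equalize its histogram to reduce sensitivity to lighting.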
Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
Imgproc.equalizeHist (grayMat, grayMat);
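// Run the multi-scale detector: scaleFactor 1.1, minNeighbors 2, and a minimum face size of 20% of the frame in each dimension.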
if (cascade != null)
cascade.detectMultiScale (grayMat, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE,
new Size (grayMat.cols () * 0.2, grayMat.rows () * 0.2), new Size ());
OpenCVForUnity.CoreModule.Rect[] rects = faces.toArray ();
for (int i = 0; i < rects.Length; i++) {
// Debug.Log ("detect faces " + rects [i]);
Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
}
// Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
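// Copy the annotated RGBA Mat back into the Texture2D displayed on the quad.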
Utils.fastMatToTexture2D (rgbaMat, texture);
}
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy ()
{
webCamTextureToMatHelper.Dispose ();
if (cascade != null)
cascade.Dispose ();
#if UNITY_WEBGL && !UNITY_EDITOR
if (getFilePath_Coroutine != null) {
StopCoroutine (getFilePath_Coroutine);
((IDisposable)getFilePath_Coroutine).Dispose ();
}
#endif
}
/// <summary>
/// Raises the back button click event.
/// </summary>
public void OnBackButtonClick ()
{
SceneManager.LoadScene ("OpenCVForUnityExample");
}
/// <summary>
/// Raises the play button click event.
/// </summary>
public void OnPlayButtonClick ()
{
webCamTextureToMatHelper.Play ();
}
/// <summary>
/// Raises the pause button click event.
/// </summary>
public void OnPauseButtonClick ()
{
webCamTextureToMatHelper.Pause ();
}
/// <summary>
/// Raises the stop button click event.
/// </summary>
public void OnStopButtonClick ()
{
webCamTextureToMatHelper.Stop ();
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick ()
{
webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing ();
}
}
}