advancedskrald/ChessAR/Assets/detection_script_2.cs

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using System.Linq;
using Vuforia;

public class detection_script : MonoBehaviour
{
    Mat cameraImageMat;
    Mat stylizedMat = new Mat();
    Mat greyMat = new Mat();
    int width = 100;
    int height = 100;
    Texture2D outputTexture;
    MatOfPoint2f dstPointsInv;
    MatOfPoint2f imagePoints;
    private Mat skullTextureMat;

    // Start is called before the first frame update
    void Start()
    {
        skullTextureMat = MatDisplay.LoadRGBATexture("Resources/flying_skull_tex.png");

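        // Destination quad: the corners of a width x height target image that the
        // detected square is mapped to when computing the homography.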
        dstPointsInv = new MatOfPoint2f();
        dstPointsInv.alloc(4);
        dstPointsInv.put(0, 0, 0, 0);
        dstPointsInv.put(1, 0, width, 0);
        dstPointsInv.put(2, 0, 0, height);
        dstPointsInv.put(3, 0, width, height);

        outputTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);
        imagePoints = new MatOfPoint2f();
        imagePoints.alloc(4);
    }

    // Update is called once per frame
    void Update()
    {
        MatDisplay.SetCameraFoV(41.5f);
        Image cameraImage = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);
        if (cameraImage != null)
        {
            if (cameraImageMat == null)
            {
                cameraImageMat = new Mat(cameraImage.Height, cameraImage.Width, CvType.CV_8UC4);
            }
            cameraImageMat.put(0, 0, cameraImage.Pixels);

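            // Binarize the camera image so dark square outlines separate from the background.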
            Imgproc.cvtColor(cameraImageMat, greyMat, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.threshold(greyMat, stylizedMat, 69, 255, Imgproc.THRESH_BINARY);

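            // Find all contours in the binary image and keep those whose polygon
            // approximation has exactly four corners (quadrilateral candidates).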
            List<MatOfPoint> contourList = new List<MatOfPoint>();
            Mat hierarchy = new Mat();
            Imgproc.findContours(stylizedMat, contourList, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
            //Imgproc.drawContours(cameraImageMat, contourList, -1, new Scalar(255, 0, 0), 2);

            List<MatOfPoint2f> squareContours = new List<MatOfPoint2f>();
            foreach (var contour in contourList)
            {
                MatOfPoint2f contour2f = new MatOfPoint2f();
                contour.convertTo(contour2f, CvType.CV_32FC2);
                double epsilon = 0.01f * Imgproc.arcLength(contour2f, true);
                MatOfPoint2f approx = new MatOfPoint2f();
                Imgproc.approxPolyDP(contour2f, approx, epsilon, true);
                if (approx.toList().Count == 4)
                {
                    Imgproc.drawContours(cameraImageMat, new List<MatOfPoint> { contour }, -1, new Scalar(255, 0, 0), 2);
                    squareContours.Add(approx);
                }
            }

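            // Select the candidate whose bounding box encloses another detected
            // square (see findSquare below) and draw it in green.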
            MatOfPoint2f square = findSquare(squareContours);
            if (square != null)
            {
                MatOfPoint greenSquare = new MatOfPoint();
                square.convertTo(greenSquare, CvType.CV_32S);
                Imgproc.drawContours(cameraImageMat, new List<MatOfPoint> { greenSquare }, -1, new Scalar(0, 255, 0), 2);

                for (int i = 0; i < 4; i++)
                {
                    print($"Square: {square.get(i, 0)}, idx {i}");
                    imagePoints.put(i, 0, square.get(i, 0)[0], square.get(i, 0)[1]);
                }

                // Debug draw points using OpenCV's drawing functions
                Point imgPnt1 = new Point(imagePoints.get(0, 0));
                Point imgPnt2 = new Point(imagePoints.get(1, 0));
                Point imgPnt3 = new Point(imagePoints.get(2, 0));
                Point imgPnt4 = new Point(imagePoints.get(3, 0));
                Imgproc.circle(cameraImageMat, imgPnt1, 5, new Scalar(255, 0, 0, 255));
                Imgproc.circle(cameraImageMat, imgPnt2, 5, new Scalar(0, 255, 0, 255));
                Imgproc.circle(cameraImageMat, imgPnt3, 5, new Scalar(0, 0, 255, 255));
                Imgproc.circle(cameraImageMat, imgPnt4, 5, new Scalar(255, 255, 0, 255));

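                // Map the skull texture onto the detected square: ComputeHomo returns the
                // homography from image points to the texture's coordinate frame, so its
                // inverse warps the texture into the camera image, which is then blended
                // on top of the feed.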
                Mat homo = ComputeHomo(imagePoints, dstPointsInv);
                Mat outputSkullMat = cameraImageMat.clone();
                Imgproc.warpPerspective(skullTextureMat, outputSkullMat, homo.inv(), outputSkullMat.size());
                Mat dstCam = cameraImageMat.clone();
                Core.addWeighted(cameraImageMat, 0.95f, outputSkullMat, 0.7f, 0.0f, dstCam);

                // Display the Mat that includes video feed and debug points
                MatDisplay.DisplayMat(dstCam, MatDisplaySettings.FULL_BACKGROUND);
            }
            else
            {
                MatDisplay.DisplayMat(cameraImageMat, MatDisplaySettings.FULL_BACKGROUND);
            }
        }
    }

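    // Returns the first 4-corner contour whose axis-aligned bounding box strictly
    // contains another candidate's bounding box (the outer of a nested pair of
    // squares), with its corners sorted by x and then y. Returns null if no such
    // pair exists.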
    MatOfPoint2f findSquare(List<MatOfPoint2f> squareContours)
    {
        foreach (var outer_square in squareContours)
        {
            var outer_maxX = outer_square.toList().Max(point => point.x);
            var outer_maxY = outer_square.toList().Max(point => point.y);
            var outer_minX = outer_square.toList().Min(point => point.x);
            var outer_minY = outer_square.toList().Min(point => point.y);
            foreach (var inner_square in squareContours)
            {
                var inner_maxX = inner_square.toList().Max(point => point.x);
                var inner_maxY = inner_square.toList().Max(point => point.y);
                var inner_minX = inner_square.toList().Min(point => point.x);
                var inner_minY = inner_square.toList().Min(point => point.y);
                if (outer_minX < inner_minX &&
                    outer_minY < inner_minY &&
                    outer_maxX > inner_maxX &&
                    outer_maxY > inner_maxY)
                {
                    List<Point> sortedOuterSquare = outer_square.toList().OrderBy(p => p.x).ThenBy(p => p.y).ToList();
                    MatOfPoint2f res = new MatOfPoint2f();
                    res.alloc(4);
                    for (int i = 0; i < 4; i++)
                    {
                        res.put(i, 0, sortedOuterSquare[i].x, sortedOuterSquare[i].y);
                    }
                    print("--------------------------------");
                    return res;
                }
            }
        }
        return null;
    }

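    // Builds the 3x3 homography H (with H[2,2] fixed to 1) that maps imgPoints to
    // destPoints by solving the 8x8 DLT-style linear system A * h = b for the
    // remaining eight entries. For a plain 4-point mapping, OpenCV's
    // Imgproc.getPerspectiveTransform(imgPoints, destPoints) should give an
    // equivalent result if a library call is preferred over the manual setup.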
    Mat ComputeHomo(MatOfPoint2f imgPoints, MatOfPoint2f destPoints)
    {
        Mat H = new Mat(8, 1, CvType.CV_32FC1);
        Mat A = new Mat(8, 8, CvType.CV_32FC1);
        Mat b = new Mat(8, 1, CvType.CV_32FC1);
        for (int i = 0; i < 4; i++)
        {
            var u = destPoints.get(i, 0)[0];
            var v = destPoints.get(i, 0)[1];
            b.put(i * 2, 0, u);
            b.put((i * 2) + 1, 0, v);
            var x = imgPoints.get(i, 0)[0];
            var y = imgPoints.get(i, 0)[1];
            A.put(i * 2, 0, x, y, 1, 0, 0, 0, -u * x, -u * y);
            A.put((i * 2) + 1, 0, 0, 0, 0, x, y, 1, -v * x, -v * y);
        }
        Core.solve(A, b, H);
        Mat homography = new Mat(3, 3, CvType.CV_32FC1);
        homography.put(0, 0, H.get(0, 0)[0], H.get(1, 0)[0], H.get(2, 0)[0]);
        homography.put(1, 0, H.get(3, 0)[0], H.get(4, 0)[0], H.get(5, 0)[0]);
        homography.put(2, 0, H.get(6, 0)[0], H.get(7, 0)[0], 1);
        return homography;
    }
}