Increase amount of sense being made.

Casper 2019-04-16 23:29:41 +02:00
parent b29aa9beef
commit 822497e857
No known key found for this signature in database
GPG Key ID: 289CA03790535054
5 changed files with 461 additions and 55 deletions

409 ChessAR/.gitignore vendored Normal file
View File

@@ -0,0 +1,409 @@
# Created by https://www.gitignore.io/api/unity,visualstudio,visualstudiocode
# Edit at https://www.gitignore.io/?templates=unity,visualstudio,visualstudiocode
### Unity ###
[Ll]ibrary/
[Tt]emp/
[Oo]bj/
[Bb]uild/
[Bb]uilds/
[Ll]ogs/
# Never ignore Asset meta data
![Aa]ssets/**/*.meta
# Uncomment this line if you wish to ignore the asset store tools plugin
# [Aa]ssets/AssetStoreTools*
# TextMesh Pro files
[Aa]ssets/TextMesh*Pro/
# Visual Studio cache directory
.vs/
# Gradle cache directory
.gradle/
# Autogenerated VS/MD/Consulo solution and project files
ExportedObj/
.consulo/
*.csproj
*.unityproj
*.sln
*.suo
*.tmp
*.user
*.userprefs
*.pidb
*.booproj
*.svd
*.pdb
*.mdb
*.opendb
*.VC.db
# Unity3D generated meta files
*.pidb.meta
*.pdb.meta
*.mdb.meta
# Unity3D generated file on crash reports
sysinfo.txt
# Builds
*.apk
*.unitypackage
# Crashlytics generated file
crashlytics-build.properties
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
### VisualStudio ###
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Ll]og/
# Visual Studio 2015/2017 cache/options directory
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
.builds
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opensdf
*.sdf
*.cachefile
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- Backup*.rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# End of https://www.gitignore.io/api/unity,visualstudio,visualstudiocode

View File

@@ -1,12 +1,11 @@
import base64
import json
import sys
import cv2
import sys
import numpy as np
from runner import warp_board
from runner import find_keypoints
# Load base64 encoded image from stdin
stdin = sys.stdin.readline()
@@ -15,17 +14,13 @@ img_array = np.frombuffer(stdin_decoded, dtype=np.uint8)
camera_img = cv2.imdecode(img_array, flags=cv2.COLOR_BGR2RGB)
camera_img = cv2.cvtColor(camera_img, cv2.COLOR_BGR2RGB)
# Warp board, saving the homography points as well
src_points = dst_points = []
#cv2.imshow("ppslpsl", camera_img)
#cv2.waitKey(0)
points1, points2 = warp_board(camera_img, src_points=src_points, dst_points=dst_points, short_circuit=True)
# Find keypoints in image and pass them back to unity
src_points, dst_points = find_keypoints(camera_img)
# Finally, output to stdout for unity to read
result = {
"src_points": [p.tolist() for p in points1],
"dst_points": [p.tolist() for p in points2],
"src_points": [p.tolist() for p in src_points],
"dst_points": [p.tolist() for p in dst_points],
}
print(json.dumps(result))
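
The hunk above keeps the bridge script as a thin stdin/stdout layer: Unity writes one base64-encoded camera frame, and the script replies with a JSON object of matched source and destination points. A minimal sketch of driving that protocol from a test harness follows; the script's filename is not visible in this diff, so "bridge.py" is a hypothetical placeholder, as is the sample image path.

import base64
import json
import subprocess

# Encode a test photo of the board as a single base64 line, matching the
# sys.stdin.readline() call in the hunk above.
with open("camera_frame.jpg", "rb") as f:
    payload = base64.b64encode(f.read()) + b"\n"

# "bridge.py" stands in for the script shown in this diff.
proc = subprocess.run(["python", "bridge.py"], input=payload, capture_output=True)

result = json.loads(proc.stdout)
print(len(result["src_points"]), "source points,", len(result["dst_points"]), "destination points")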

View File

@@ -1,18 +1,15 @@
from os import path
from pathlib import Path
import cv2
import glob
import os
from datetime import datetime
from pathlib import Path
from typing import Tuple
import cv2
import numpy as np
from sklearn import cluster, metrics, svm
from sklearn import cluster, metrics, svm, neural_network
from sklearn.externals import joblib
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn import neural_network
from util import RANK, POSITION, imwrite, PIECE, COLOR, Squares, OUR_PIECES
@@ -136,10 +133,14 @@ def train_pieces_svm_canny() -> None:
joblib.dump(classifier, f"classifiers/classifier_empty/white_piece_on_{square_color}_square.pkl")
def warp_board(camera_image, debug_image=None, src_points: list = None, dst_points: list = None, short_circuit=False) -> np.ndarray:
baseline = cv2.imread(str(here.joinpath("new_baseline_board.png")))
def find_keypoints(camera_image: np.ndarray,
baseline: np.ndarray = cv2.imread(str(here.joinpath("new_baseline_board.png"))),
debug=False) -> Tuple[np.ndarray, np.ndarray]:
"""
Find keypoints in raw camera image of board.
:return: (src points, dest points)
"""
camera_image_gray = cv2.cvtColor(camera_image, cv2.COLOR_BGR2GRAY)
baseline_gray = cv2.cvtColor(baseline, cv2.COLOR_BGR2GRAY)
@@ -150,10 +151,6 @@ def warp_board(camera_image, debug_image=None, src_points: list = None, dst_poin
camera_image_keypoints, des = sift.compute(camera_image_gray, camera_image_keypoints)
baseline_keypoints, des2 = sift.compute(baseline_gray, baseline_keypoints)
if debug_image is not None:
cv2.drawKeypoints(camera_image, keypoints=camera_image_keypoints, outImage=debug_image)
cv2.imwrite("keypoints_img.jpg", camera_image)
# FLANN parameters
FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=8)
@@ -172,7 +169,13 @@ def warp_board(camera_image, debug_image=None, src_points: list = None, dst_poin
matchesMask[i] = [1, 0]
good_matches.append([m, n])
img3 = cv2.drawMatchesKnn(
if debug:
# Save keypoints
keypoints_image = camera_image.copy()
cv2.drawKeypoints(camera_image, keypoints=camera_image_keypoints, outImage=keypoints_image)
cv2.imwrite("keypoints.png", keypoints_image)
# Save matches
matches_image = cv2.drawMatchesKnn(
camera_image,
camera_image_keypoints,
baseline,
@@ -184,27 +187,26 @@ def warp_board(camera_image, debug_image=None, src_points: list = None, dst_poin
matchesMask=matchesMask,
flags=0
)
cv2.imwrite("matches.jpg", img3)
cv2.imwrite("matches.png", matches_image)
# Extract location of good matches
points1 = np.zeros((len(good_matches), 2), dtype=np.float32)
points2 = np.zeros((len(good_matches), 2), dtype=np.float32)
src_points = np.zeros((len(good_matches), 2), dtype=np.float32)
dst_points = np.zeros((len(good_matches), 2), dtype=np.float32)
for i, (m, n) in enumerate(good_matches):
points1[i, :] = camera_image_keypoints[m.queryIdx].pt
points2[i, :] = baseline_keypoints[m.trainIdx].pt
src_points[i, :] = camera_image_keypoints[m.queryIdx].pt
dst_points[i, :] = baseline_keypoints[m.trainIdx].pt
if src_points is not None:
src_points.extend(points1)
if dst_points is not None:
dst_points.extend(points2)
return src_points, dst_points
if short_circuit:
return points1, points2
h, mask = cv2.findHomography(points1, points2, cv2.RANSAC)
def warp_board(camera_image: np.ndarray, debug=False) -> np.ndarray:
baseline = cv2.imread(str(here.joinpath("new_baseline_board.png")))
src_points, dst_points = find_keypoints(camera_image, baseline, debug=debug)
h, mask = cv2.findHomography(src_points, dst_points, cv2.RANSAC)
height, width, channels = baseline.shape
return cv2.warpPerspective(camera_image, h, (width, height))
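
The net effect of these hunks is a split of responsibilities: find_keypoints runs SIFT detection, FLANN matching and the ratio test and returns the raw correspondences, while warp_board now simply feeds those correspondences into cv2.findHomography and warps the camera image onto the baseline board. A hedged usage sketch, assuming runner.py is importable and new_baseline_board.png sits beside it as the code implies; the input image path is a placeholder.

import cv2
from runner import find_keypoints, warp_board

camera_img = cv2.imread("camera_frame.jpg")  # hypothetical test frame

# Correspondences only, as consumed by the Unity bridge script:
src_points, dst_points = find_keypoints(camera_img)

# Full perspective warp onto the baseline board; debug=True also writes
# keypoints.png and matches.png per the hunk above.
warped = warp_board(camera_img, debug=True)
cv2.imwrite("warped_board.png", warped)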

View File

@@ -1,11 +1,11 @@
from __future__ import annotations
import cv2
from enum import Enum
from functools import lru_cache
from pathlib import Path
from typing import NewType, NamedTuple, Dict, Tuple
from typing import NamedTuple, Dict, Tuple
import cv2
import numpy as np
from sklearn.externals import joblib
@@ -76,10 +76,10 @@ class _Position(NamedTuple):
# POSITION.{A8, A7, ..., H1}
POSITION = Enum("POSITION", {str(_Position(f, r)): _Position(f, r) for f in FILE for r in RANK}, type=_Position)
POSITION = Enum("POSITION", {str(_Position(f, r)): _Position(f, r) for f in FILE for r in RANK}, type=_Position) # NOQA
# Squares is a dict mapping positions to square images, i.e. a board container during image processing
Squares = NewType("Squares", Dict[POSITION, np.ndarray])
Squares = Dict[POSITION, np.ndarray]
class Board(Dict[POSITION, PIECE]):
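
Squares is now a plain typing alias (Dict[POSITION, np.ndarray]) rather than a NewType, so a literal dict can be annotated as Squares without a wrapper call. A small hedged illustration, using square names from the POSITION comment above and dummy arrays in place of real cropped square images.

import numpy as np
from util import POSITION, Squares

# A plain dict now satisfies the alias directly; the 64x64 arrays are
# dummy stand-ins for cropped square images.
squares: Squares = {
    POSITION.A8: np.zeros((64, 64, 3), dtype=np.uint8),
    POSITION.H1: np.zeros((64, 64, 3), dtype=np.uint8),
}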