From 3e4327daeeb8b728edbec453f6e29b64d799009a Mon Sep 17 00:00:00 2001
From: Alexander Munch-Hansen
Date: Wed, 10 Apr 2019 13:39:50 +0200
Subject: [PATCH] Works a bit.

---
 main.py   | 123 +++++++++++++++++++++---------------------------------
 runner.py |  43 +++++++++++--------
 util.py   |   7 +++-
 3 files changed, 78 insertions(+), 95 deletions(-)

diff --git a/main.py b/main.py
index f5d51cc0..49ff0b31 100644
--- a/main.py
+++ b/main.py
@@ -1,39 +1,39 @@
-import cv2
 import sys
-from collections import defaultdict
+import warnings
 from datetime import datetime
+from typing import List, Tuple
 
+import cv2
 import matplotlib.pyplot as plt
 import numpy as np
+from sklearn.exceptions import DataConversionWarning
 
 import runner
-from util import load_classifier, PIECE, COLOR, POSITION, Board, Squares, PieceAndColor
-from sklearn.exceptions import DataConversionWarning
-import warnings
+from util import load_classifier, PIECE, COLOR, POSITION, Board, Squares, PieceAndColor, OUR_PIECES
 
 warnings.filterwarnings(action='ignore', category=DataConversionWarning)
 np.set_printoptions(threshold=sys.maxsize)
 
 
-def identify_piece(image: np.ndarray, sift : cv2.xfeatures2d_SIFT, empty_bias=False) -> PieceAndColor:
+def identify_piece(image: np.ndarray, position: POSITION, sift: cv2.xfeatures2d_SIFT) -> PieceAndColor:
     centers = np.load("training_data/centers.npy")
-    probs = defaultdict(lambda: defaultdict(float))
     best = 0
+    probs = {p.name: {} for p in OUR_PIECES}
+    best_piece = best_color = None
-    for piece in PIECE:
+    for piece in OUR_PIECES:
         for color in COLOR:
             #color = runner.compute_color(file, rank)
             classifier = load_classifier(f"classifiers/classifier_{piece}/{color}.pkl")
             features = runner.generate_bag_of_words(image, centers, sift)
             prob = classifier.predict_proba(features)
-            probs[piece][color] = prob[0, 1]
+            probs[piece.name][color.name] = prob[0, 1]
+            print(f"{piece}, {color}, {prob[0, 1]}")
+            #if prob[0, 1] > best and color == position.color:  # can only be best if correct color. Iterating through both colors for debugging only
             if prob[0, 1] > best:
+                best = prob[0, 1]
                 best_piece, best_color = piece, color
-    print(probs)
-
-    if empty_bias:
-        probs[PIECE.EMPTY] *= 1.2
-
+    #print(probs)
     return best_piece, best_color
 
 
@@ -41,7 +41,7 @@ def pred_test(position: POSITION, mystery_image=None, empty_bias=False):
     sift = cv2.xfeatures2d.SIFT_create()
     if mystery_image is None:
         mystery_image = cv2.imread("training_images/rook/white/rook_training_D4_2.png")
-    probs = identify_piece(mystery_image, sift, empty_bias=empty_bias)
+    probs = identify_piece(mystery_image, position, sift)
     return probs
 
 
@@ -55,7 +55,7 @@ def build_board_from_squares(squares: Squares) -> Board:
     board = Board()
     counter = 0
     for position, square in squares.values():
-        likely_piece = identify_piece(square, sift)
+        likely_piece = identify_piece(square, position, sift)
         board[position] = likely_piece
         if likely_piece != PIECE.EMPTY:
             counter += 1
@@ -74,7 +74,7 @@ def test_entire_board() -> None:
     print(board)
 
 
-def predict(square: np.ndarray, position: POSITION) -> PIECE:
+def predict_empty(square: np.ndarray, position: POSITION) -> PIECE:
     y, x = np.histogram(square.ravel(), bins=32, range=[0, 256])
     left, right = x[:-1], x[1:]
 
@@ -89,9 +89,9 @@ def predict(square: np.ndarray, position: POSITION) -> PIECE:
     #for color in COLOR:
     empty_classifier = load_classifier(f"classifiers/classifier_empty/white_piece_on_{position.color}_square.pkl")
     prob = empty_classifier.predict_proba(np.array(y).reshape(1, -1))
-    print(f"{position}, {position.color}: {prob[0, 1]}")
+    #print(f"{position}, {position.color}: {prob[0, 1]}")
     if prob[0, 1] > 0.95:
-        print(f"{position} is empty")
+        # print(f"{position} is empty")
         return PIECE.EMPTY
 
     return None
@@ -124,82 +124,53 @@ def remove_most_empties(warped):
         if np.max(segment) > 0 and not np.all([x < (164 ** 2) * 0.2 for x in pls]) and (
                 np.max(segment) >= 3 or np.all([x < (164 ** 2) * 0.942 for x in pls])):
-            print(f"{position} is nonempty")
+            #print(f"{position} is nonempty")
             non_empties.append([position, src])
         empty += 1
 
-    print(64 - empty)
+    #print(64 - empty)
 
     return non_empties
 
 
-if __name__ == '__main__':
-
-
-    #board = cv2.imread("whole_boards/boards_for_empty/board_1554286488.605142_rank_3.png")
-    board = cv2.imread("whole_boards/boards_for_empty/board_1554288606.075646_rank_1.png")
-
-    warped = runner.warp_board(board)
+def find_occupied_squares(warped: np.ndarray) -> List[Tuple[POSITION, np.ndarray]]:
     non_empties = remove_most_empties(warped)
-
-    #empty_classifier = load_classifier(f"classifiers/classifier_empty/white_piece_on_white_square.pkl")
-    #print(empty_classifier.predict_proba(np.array([0]*16).reshape(1, -1))[0, 1])
-
-    #exit()
-
-
-    counter = 0
     completely_non_empties = []
     for position, square in non_empties:
-        #predict(square, position)
+        if predict_empty(square, position) != PIECE.EMPTY:
+            completely_non_empties.append((position, square))
 
-        #y, x = np.histogram(square.ravel(), bins=32, range=[0, 256])
-        #left, right = x[:-1], x[1:]
-        #X = np.array([left, right]).T.flatten()
-        #Y = np.array([y, y]).T.flatten()
-        #plt.plot(X, Y)
-        #plt.xlabel(f"{position}")
-        #plt.show()
+    return completely_non_empties
 
+if __name__ == '__main__':
 
-        if predict(square,position) == PIECE.EMPTY:
-            counter += 1
-        else:
-            completely_non_empties.append([position, square])
+    #runner.train_pieces_svm()
 
+    #board = cv2.imread("whole_boards/boards_for_empty/board_1554286488.605142_rank_3.png")
+    board = cv2.imread("whole_boards/boards_for_empty/board_1554286515.323962_rank_3.png")
+    warped = runner.warp_board(board)
 
-
-
-    print(counter)
-    for position, square in completely_non_empties:
-        cv2.imshow(f"{position}", square)
+    rook_square = runner.get_square(warped, POSITION.H3)
+    knight_square = runner.get_square(warped, POSITION.D3)
+    cv2.imshow("lel", rook_square)
+    cv2.imshow("lil", knight_square)
+    rook_out = cv2.Canny(rook_square, 50, 55, L2gradient=True)
+    knight_out = cv2.Canny(knight_square, 50, 55, L2gradient=True)
+    cv2.imshow("lal", rook_out)
+    cv2.imshow("lul", knight_out)
 
     cv2.waitKey(0)
     exit()
 
+    occupied = find_occupied_squares(warped)
 
-    square_img = runner.get_square(warped, "D", 2)
-
-    gray_square_img = cv2.cvtColor(square_img, cv2.COLOR_BGR2GRAY)
-    print(cv2.meanStdDev(gray_square_img)[1])
-    print(cv2.meanStdDev(square_img)[1])
-    cv2.imshow("square", square_img)
+    sift = cv2.xfeatures2d.SIFT_create()
+    for position, square in occupied:
+        print("---"*15)
+        piece, color = identify_piece(square, position, sift)
+        print(f"{piece} on {position}")
+        text_color = 255 if color == COLOR.WHITE else 0
+        cv2.putText(square, f"{position} {piece.name}", (0, 50), cv2.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(text_color,)*3, thickness=3)
+        cv2.imshow(f"{position}", square)
     cv2.waitKey(0)
-
-
-
-
-
-    print(pred_test("C", 2, square_img))
-
-    sift: cv2.xfeatures2d_SIFT = cv2.xfeatures2d.SIFT_create()
-    gray = cv2.cvtColor(square_img, cv2.COLOR_BGR2GRAY)
-
-    kp, desc = sift.detectAndCompute(gray, None)
-
-    cv2.drawKeypoints(square_img, kp, square_img)
-
-    cv2.imshow("kp", square_img)
-    cv2.waitKey(0)
-
-
diff --git a/runner.py b/runner.py
index 831cd287..94e3536d 100644
--- a/runner.py
+++ b/runner.py
@@ -10,20 +10,26 @@ from sklearn.externals import joblib
 from sklearn.pipeline import make_pipeline
 from sklearn.preprocessing import StandardScaler
 
-from util import RANK, POSITION, imwrite, PIECE, COLOR, Squares
+from util import RANK, POSITION, imwrite, PIECE, COLOR, Squares, OUR_PIECES
 
 
 def generate_centers(number_of_clusters, sift: cv2.xfeatures2d_SIFT):
-    features = []
-    for piece in PIECE:
+    features = None
+    for piece in OUR_PIECES:
         for color in COLOR:
-            for filename in glob.glob(os.path.join("training_images", piece, f"{color}_square", "*.png")):
+            for filename in glob.glob(f"training_images/{piece}/{color}_square/*.png"):
                 image = cv2.imread(filename)
                 #image = selective_search(image, use_fast=True)
                 gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
                 kp, desc = sift.detectAndCompute(gray, None)
                 print(f"{piece}, {color}, {filename}")
-                features.append(desc)
+
+                if features is None:
+                    features = np.array(desc)
+                else:
+                    print(f"{piece}, {color}, {filename}")
+                    features = np.vstack((features, desc))
+    features = np.array(features)
 
     k_means = cluster.KMeans(number_of_clusters)
     k_means.fit(features)
@@ -55,9 +61,9 @@ def do_pre_processing() -> None:
 
     np.save("training_data/centers", centers)
 
-    for piece in PIECE:
+    for piece in OUR_PIECES:
         for color in COLOR:
-            for filename in glob.glob(os.path.join("training_images", piece, f"{color}_square", "*.png")):
+            for filename in glob.glob(f"training_images/{piece}/{color}_square/*.png"):
                 image = cv2.imread(filename)
                 #image = selective_search(image, image_name=filename, use_fast=True)
                 bow_features = generate_bag_of_words(image, centers, sift)
@@ -67,10 +73,10 @@ def load_training_data(piece: PIECE, color: COLOR) -> Tuple[np.array, np.array]:
     X = []
     Y = []
-    for p in PIECE:
-        for filename in glob.glob(os.path.join("training_data", piece, f"{color}_square", "*.npy")):
+    for p in OUR_PIECES:
+        for filename in glob.glob(f"training_data/{piece}/{color}_square/*.npy"):
             data = np.load(filename)
-            X.append(data)
+            X.append(data[0])
             Y.append(p == piece)
     return np.array(X), np.array(Y)
 
 
@@ -79,8 +85,8 @@ def train_empty_or_piece_hist() -> None:
     for square_color in COLOR:
         X = []
         Y = []
-        for piece in (PIECE.EMPTY, PIECE.ROOK, PIECE.KNIGHT):
-            for filename in glob.glob(os.path.join("training_images", f"{piece}", f"{square_color}_square", "*.png")):
+        for piece in OUR_PIECES + (PIECE.EMPTY,):
+            for filename in glob.glob(f"training_images/{piece}/{square_color}_square/*.png"):
                 img = cv2.imread(filename)
                 y, x = np.histogram(img.ravel(), bins=32, range=[0, 256])
                 X.append(y)
@@ -93,15 +99,16 @@ def train_pieces_svm() -> None:
-    for piece in PIECE:
+    for piece in OUR_PIECES:
         for color in COLOR:
-            # TODO: Consider removing empty from total_weights, so all classifiers do not consider empty pieces
-            total_weights = len(glob.glob(os.path.join("training_images", "*", f"{color}_square", "*.png")))
-            current_weight = len(glob.glob(os.path.join("training_images", piece, f"{color}_square", "*.png")))
+            total_weights = len(glob.glob(f"training_images/{piece}/{color}_square/*.png"))
 
-            print(f"Trainig for piece: {piece}")
+    for piece in OUR_PIECES:
+        for color in COLOR:
+            current_weight = len(glob.glob(f"training_images/{piece}/{color}_square/*.png"))
+            print(f"Training for piece: {piece}")
             X, Y = load_training_data(piece, color)
-            classifier = svm.SVC(class_weight={0: current_weight, 1: total_weights - current_weight}, probability=True)
+            classifier = svm.SVC(C=10, gamma=0.01, class_weight={0: 45, 1: 1}, probability=True)
             classifier.fit(X, Y)
             joblib.dump(classifier, f"classifiers/classifier_{piece}/{color}.pkl")
 
diff --git a/util.py b/util.py
index 92ced2a4..7732d27e 100644
--- a/util.py
+++ b/util.py
@@ -33,6 +33,11 @@ class PIECE(Enum):
 
 PieceAndColor = Tuple[PIECE, COLOR]
 
+OUR_PIECES = (
+    PIECE.ROOK,
+    PIECE.KNIGHT,
+)
+
 
 class FILE(int, Enum):
     A = 1
@@ -88,5 +93,5 @@ def imwrite(*args, **kwargs):
 
 @lru_cache()
 def load_classifier(filename):
-    print(f"Loading classifier {filename}")
+    # print(f"Loading classifier {filename}")
     return joblib.load(filename)