fixed global step, now using exp decay
parent cba0f67ae2
commit 66589dfde3
@@ -38,10 +38,10 @@ class Network:
         # Can't remember the best learning_rate, look this up
         self.max_learning_rate = 0.1
         self.min_learning_rate = 0.001
-        self.learning_rate = 0.01
+        # self.learning_rate = 0.01

         self.global_step = tf.Variable(0, trainable=False, name="global_step")
-        # self.learning_rate = tf.maximum(self.min_learning_rate, tf.train.exponential_decay(self.max_learning_rate, self.global_step, 50000, 0.96, staircase=True), name="learning_rate")
+        self.learning_rate = tf.maximum(self.min_learning_rate, tf.train.exponential_decay(self.max_learning_rate, self.global_step, 50000, 0.96, staircase=True), name="learning_rate")
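The schedule above only decays if global_step actually advances, which in the TF1 API normally happens by passing it to the optimizer's minimize call. A minimal, self-contained sketch of the same schedule follows; the toy loss and the plain GradientDescentOptimizer are illustrative assumptions, not part of this commit.

import tensorflow as tf

max_learning_rate = 0.1
min_learning_rate = 0.001

global_step = tf.Variable(0, trainable=False, name="global_step")

# Start at 0.1, multiply by 0.96 every 50000 steps (staircase=True decays in
# discrete jumps), and never drop below 0.001.
learning_rate = tf.maximum(
    min_learning_rate,
    tf.train.exponential_decay(max_learning_rate, global_step,
                               50000, 0.96, staircase=True),
    name="learning_rate")

w = tf.Variable(5.0)
loss = tf.square(w)  # toy loss, stands in for the network's real loss

# Passing global_step to minimize() is what increments the counter on every
# update; without it the decayed rate would stay at its initial value.
train_op = tf.train.GradientDescentOptimizer(learning_rate).minimize(
    loss, global_step=global_step)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(3):
        _, lr, step = sess.run([train_op, learning_rate, global_step])
        print(step, lr)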
@@ -164,11 +164,10 @@ class Network:

         # Restore trained episode count for model
         episode_count_path = os.path.join(self.checkpoint_path, "episodes_trained")
         if os.path.isfile(episode_count_path):
             with open(episode_count_path, 'r') as f:
                 self.config['start_episode'] = int(f.read())
-        else:
-            assert False

     def make_move(self, sess, board, roll, player):
         # print(Board.pretty(board))
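The second hunk drops the assert False fallback, so a missing episodes_trained file no longer aborts the restore. A rough sketch of the counter round trip, assuming a hypothetical save_episode_count helper and a default start episode of 0 when the file is absent; only the restore side actually appears in this diff.

import os

def save_episode_count(checkpoint_path, episode):
    # Persist the number of trained episodes next to the model checkpoint
    # (hypothetical helper, mirrors the file read in the diff).
    with open(os.path.join(checkpoint_path, "episodes_trained"), 'w') as f:
        f.write(str(episode))

def restore_episode_count(checkpoint_path, config):
    # Same logic as the restored code path: read the counter if present,
    # otherwise fall back to episode 0 (an assumption about the intended
    # behaviour now that the assert is gone).
    episode_count_path = os.path.join(checkpoint_path, "episodes_trained")
    if os.path.isfile(episode_count_path):
        with open(episode_count_path, 'r') as f:
            config['start_episode'] = int(f.read())
    else:
        config['start_episode'] = 0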