From c9e4446a527046ead2643279e092b750c8ea8f14 Mon Sep 17 00:00:00 2001
From: Anders Ladefoged
Date: Tue, 6 Mar 2018 12:19:04 +0100
Subject: [PATCH] Custom activation (2*tanh(x)) function implemented with
 tensorflow primitives.

---
 network.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/network.py b/network.py
index 1ce3704..1c9f6a5 100644
--- a/network.py
+++ b/network.py
@@ -18,6 +18,10 @@ class Network:
 
         # TODO: Actually compile tensorflow properly
         os.environ["TF_CPP_MIN_LOG_LEVEL"]="2"
+
+    def custom_tanh(self, x, name=None):
+        a = tf.Variable(2.00, tf.float32)
+        return tf.scalar_mul(a, tf.tanh(x, name))
 
     def __init__(self, session):
         self.session = session
@@ -40,12 +44,10 @@ class Network:
         b_1 = tf.zeros(hidden_size,)
         b_2 = tf.zeros(output_size,)
 
-        value_after_input = tf.sigmoid(tf.matmul(self.x, W_1) + b_1, name='hidden_layer')
+        value_after_input = self.custom_tanh(tf.matmul(self.x, W_1) + b_1, name='hidden_layer')
 
         # TODO: Remember to make this tanh * 2
-        # self.value = tf.layers.dense(input=value_after_input, units=hidden_size, \
-        #                              activation=self.custom_tanh, kernel_initializer=xavier_init())
-        self.value = 2*tf.nn.tanh(tf.matmul(value_after_input, W_2) + b_2, name='output_layer')
+        self.value = self.custom_tanh(tf.matmul(value_after_input, W_2) + b_2, name='output_layer')
 
         # tf.reduce_sum basically finds the sum of it's input, so this gives the difference between the two values, in case they should be lists, which they might be if our input changes
         difference_in_values = tf.reduce_sum(self.value_next - self.value, name='difference')
@@ -67,7 +69,6 @@ class Network:
         self.saver = tf.train.Saver(max_to_keep=1)
 
         self.session.run(tf.global_variables_initializer())
-
     def eval_state(self, state):
         # Run state through a network
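
A note on the custom_tanh helper added above: in the TF1-era API, the second
positional parameter of tf.Variable is `trainable`, so tf.Variable(2.00,
tf.float32) passes tf.float32 as `trainable` (which is truthy) rather than
setting the dtype, and it also leaves the scale factor as a variable the
optimizer is free to move away from 2. Similarly, tf.tanh(x, name) attaches
the name to the inner tanh op instead of the scaled output. A minimal sketch
of a fixed-scale variant, assuming a TF1.x environment as in the patch (this
is an illustration, not the committed code):

    import tensorflow as tf

    def custom_tanh(x, name=None):
        # A fixed, non-trainable scale: tf.constant sidesteps the
        # trainable-Variable pitfall and needs no initializer run.
        a = tf.constant(2.0, dtype=tf.float32)
        # Name the final scaled output rather than the inner tanh op.
        return tf.multiply(a, tf.tanh(x), name=name)

    # Usage sketch: outputs are bounded in (-2, 2), matching the
    # 2*tf.nn.tanh(...) expression the patch replaces.
    with tf.Session() as sess:
        print(sess.run(custom_tanh(tf.constant([-10.0, 0.0, 10.0]))))
        # -> approximately [-2.  0.  2.]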