Custom activation function (2*tanh(x)) implemented with TensorFlow primitives.
parent 5845edf084
commit c9e4446a52

 network.py | 11 ++++++-----
@@ -18,6 +18,10 @@ class Network:
     # TODO: Actually compile tensorflow properly
     os.environ["TF_CPP_MIN_LOG_LEVEL"]="2"
 
+    def custom_tanh(self, x, name=None):
+        a = tf.Variable(2.00, tf.float32)
+        return tf.scalar_mul(a, tf.tanh(x, name))
+
     def __init__(self, session):
         self.session = session
 
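Aside (not part of the commit): two subtleties in custom_tanh as committed. In tf.Variable(2.00, tf.float32), the second positional argument of tf.Variable is trainable, not dtype, so the scale silently becomes a trainable variable (the dtype is inferred from 2.00 anyway). And tf.tanh(x, name) attaches the name to the inner tanh op, so the op returned by tf.scalar_mul is not the one named 'hidden_layer' or 'output_layer'. A minimal standalone sketch of the same 2*tanh activation, assuming the TF1 graph API used elsewhere in network.py, built from a plain constant and tf.multiply (which does take a name):

    import tensorflow as tf  # TF1-style graph API, as in network.py

    def custom_tanh(x, name=None):
        # 2 * tanh(x): tanh saturates in (-1, 1), so scaling by 2 widens
        # the output range to (-2, 2); the returned op carries `name`.
        return tf.multiply(2.0, tf.tanh(x), name=name)

    # Usage sketch: evaluate the activation on a few inputs.
    with tf.Session() as sess:
        x = tf.constant([-10.0, 0.0, 10.0])
        print(sess.run(custom_tanh(x, name='check')))  # approx. [-2., 0., 2.]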
@@ -40,12 +44,10 @@ class Network:
         b_1 = tf.zeros(hidden_size,)
         b_2 = tf.zeros(output_size,)
 
-        value_after_input = tf.sigmoid(tf.matmul(self.x, W_1) + b_1, name='hidden_layer')
+        value_after_input = self.custom_tanh(tf.matmul(self.x, W_1) + b_1, name='hidden_layer')
 
-        # TODO: Remember to make this tanh * 2
         # self.value = tf.layers.dense(input=value_after_input, units=hidden_size, \
         #     activation=self.custom_tanh, kernel_initializer=xavier_init())
-        self.value = 2*tf.nn.tanh(tf.matmul(value_after_input, W_2) + b_2, name='output_layer')
-
+        self.value = self.custom_tanh(tf.matmul(value_after_input, W_2) + b_2, name='output_layer')
         # tf.reduce_sum basically finds the sum of its input, so this gives the difference between the two values in case they happen to be lists, which they might be if our input changes
         difference_in_values = tf.reduce_sum(self.value_next - self.value, name='difference')
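Aside (not part of the commit): tf.reduce_sum collapses the elementwise difference to a single scalar, so difference_in_values stays well-defined even if the two values become vectors. A tiny illustration with hypothetical numbers:

    # Hypothetical values, for illustration only.
    v_next = tf.constant([0.8, 0.1])
    v      = tf.constant([0.5, 0.2])
    diff = tf.reduce_sum(v_next - v)  # (0.8-0.5) + (0.1-0.2) = 0.2, a scalar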
@@ -67,7 +69,6 @@ class Network:
         self.saver = tf.train.Saver(max_to_keep=1)
         self.session.run(tf.global_variables_initializer())
 
-
     def eval_state(self, state):
         # Run state through a network
 