"""
# Concatenate the encoded state and encoded action into a single feature vector.
combined_input = tf.concat([encoded_state, encoded_action], axis=1)
hidden = combined_input

# When not in transfer mode, freeze the encoder: block gradients from the
# forward model from flowing back into the state/action encodings.
if not self.transfer:
    hidden = tf.stop_gradient(hidden)

# Dense stack of the forward model. The arguments to tf.layers.dense were
# truncated in the source; the width, activation, and name below are assumptions.
for i in range(forward_layers):
    hidden = tf.layers.dense(
        hidden,
        self.h_size,  # assumed layer width
        activation=tf.nn.relu,  # assumed activation
        name="forward_hidden_{}".format(i),  # assumed layer name
    )
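# ---------------------------------------------------------------------------
# A minimal, self-contained sketch of the same pattern, assuming a TF1-style
# graph. ENCODING_SIZE, ACTION_SIZE, FORWARD_LAYERS, and the dense-layer
# arguments are placeholders, not the project's real hyperparameters. The
# gradient check at the end illustrates why stop_gradient is applied when
# transfer is disabled: no gradient flows back into the encodings.
# ---------------------------------------------------------------------------
import tensorflow as tf

ENCODING_SIZE = 128   # placeholder width of the encoded state
ACTION_SIZE = 4       # placeholder width of the encoded action
FORWARD_LAYERS = 2    # placeholder number of dense layers
TRANSFER = False      # mirrors self.transfer above

encoded_state = tf.placeholder(tf.float32, [None, ENCODING_SIZE], name="encoded_state")
encoded_action = tf.placeholder(tf.float32, [None, ACTION_SIZE], name="encoded_action")

hidden = tf.concat([encoded_state, encoded_action], axis=1)
if not TRANSFER:
    # Gradients from the forward-model loss stop here and never reach the
    # encoder that produced encoded_state / encoded_action.
    hidden = tf.stop_gradient(hidden)

for i in range(FORWARD_LAYERS):
    hidden = tf.layers.dense(
        hidden,
        ENCODING_SIZE,
        activation=tf.nn.relu,
        name="sketch_forward_hidden_{}".format(i),
    )

loss = tf.reduce_mean(tf.square(hidden))
# Prints [None]: there is no gradient path from the loss back to encoded_state.
print(tf.gradients(loss, [encoded_state]))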