davidparks21 · Untitled · Jul 12th, 2019 (Python)
  1. """ Q: Can you pass 2 gradients for the same variable to the optimizer? """
  2. import tensorflow as tf
  3.  
  4.  
  5. v = tf.get_variable(name='v', initializer=tf.constant(1.0), dtype=tf.float32)
  6.  
  7. l1 = v * 2
  8. l2 = v * 4
  9.  
  10. optimizer = tf.train.AdamOptimizer()
  11. g1 = optimizer.compute_gradients(loss=l1, var_list=[v])
  12. g2 = optimizer.compute_gradients(loss=l2, var_list=[v])
  13. apply_g1 = optimizer.apply_gradients(g1)
  14. apply_g2 = optimizer.apply_gradients(g2)
  15. apply_both = optimizer.apply_gradients(g1 + g2)
  16.  
# The prints below show that applying the two gradients in separate
# apply_gradients calls is not equivalent to passing both (gradient, variable)
# pairs to a single apply_gradients call when they target the same variable.
with tf.Session() as sess:
    # Two steps, each passing both (gradient, variable) pairs in one call.
    sess.run(tf.global_variables_initializer())
    print(sess.run(v))
    sess.run(apply_both)
    print(sess.run(v))
    sess.run(apply_both)
    print(sess.run(v))

    # Reset, then apply the same two gradients as two separate calls per step.
    sess.run(tf.global_variables_initializer())
    print(sess.run(v))
    sess.run(apply_g1)
    sess.run(apply_g2)
    print(sess.run(v))
    sess.run(apply_g1)
    sess.run(apply_g2)
    print(sess.run(v))
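

# --- Follow-up sketch (not part of the original experiment) ---
# The conventional way to drive one variable with several losses is to combine
# the gradients explicitly and hand apply_gradients a single
# (gradient, variable) pair, so the optimizer performs exactly one update per
# step. tf.add_n is a standard TF 1.x op; the names below (summed_grad,
# apply_summed) are assumptions of this sketch, not from the original paste.
summed_grad = tf.add_n([grad for grad, _ in g1 + g2])  # d(l1)/dv + d(l2)/dv
apply_summed = optimizer.apply_gradients([(summed_grad, v)])

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(v))        # 1.0
    sess.run(apply_summed)
    print(sess.run(v))        # one Adam step with the combined gradient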