"""Gradient of categorical cross-entropy through a one-layer linear model.

Builds logits = x @ w + b for a single 3-feature sample and 2 classes,
computes the softmax cross-entropy loss against a one-hot target, and
prints the gradients of the loss with respect to w and b.
"""
import tensorflow as tf

tf.random.set_seed(4323)  # fixed seed so the printed gradients are reproducible

x = tf.random.normal([1, 3])  # one sample, 3 features
w = tf.random.normal([3, 2])  # weight matrix: 3 features -> 2 classes
b = tf.random.normal([2])     # per-class bias

# One-hot target for class 1, shaped [1, 2] to match the logits' batch shape.
# Float dtype is required: from_logits=True routes through
# tf.nn.softmax_cross_entropy_with_logits, whose labels must match the
# logits' float dtype (an int32 tensor here would fail).
y = tf.constant([[0.0, 1.0]])

with tf.GradientTape() as tape:
    # w and b are plain tensors (not tf.Variable), so the tape must be
    # told to track them explicitly.
    tape.watch([w, b])
    logits = x @ w + b  # shape [1, 2]
    # from_logits=True applies softmax inside the loss (numerically stable);
    # reduce_mean averages the per-sample losses (here, batch of 1 -> scalar).
    loss = tf.reduce_mean(
        tf.losses.categorical_crossentropy(y, logits, from_logits=True)
    )

grads = tape.gradient(loss, [w, b])
print('w grad:', grads[0])
print('b grad:', grads[1])