Automatic Differentiation
import tensorflow as tf

# Create a trainable variable initialized to 3.0
x = tf.Variable(3.0)
x
<tf.Variable 'Variable:0' shape=() dtype=float32, numpy=3.0>
# Record the computation of y = x ** 2 on the tape
with tf.GradientTape() as tape:
    y = x ** 2

y
<tf.Tensor: shape=(), dtype=float32, numpy=9.0>
# dy/dx = 2 * x, so the gradient at x = 3.0 is 6.0
grad = tape.gradient(y, x)
grad
<tf.Tensor: shape=(), dtype=float32, numpy=6.0>
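The call above differentiates with respect to a single variable, but tape.gradient also accepts a list of variables and returns one gradient per variable. A minimal sketch of that usage, with made-up variables a and b that are not part of the example above:

a = tf.Variable(2.0)
b = tf.Variable(5.0)
with tf.GradientTape() as tape:
    loss = a ** 2 + 3.0 * b

# One gradient per variable: d(loss)/da = 2 * a = 4.0, d(loss)/db = 3.0
grad_a, grad_b = tape.gradient(loss, [a, b])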
optimizer = tf.keras.optimizers.Adam(learning_rate=0.1)
# apply_gradients returns the optimizer's iteration counter
optimizer.apply_gradients([(grad, x)])
<tf.Variable 'UnreadVariable' shape=() dtype=int64, numpy=1>

# One Adam step with learning_rate=0.1 moves x from 3.0 to 2.9
x
<tf.Variable 'Variable:0' shape=() dtype=float32, numpy=2.9>
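For comparison, the same kind of update can be written by hand with plain gradient descent instead of the Adam optimizer. A minimal sketch, using a separate hypothetical variable w so the x above is left untouched:

w = tf.Variable(3.0)
with tf.GradientTape() as tape:
    loss = w ** 2
grad_w = tape.gradient(loss, w)

# w <- w - learning_rate * grad_w: 3.0 - 0.1 * 6.0 = 2.4
w.assign_sub(0.1 * grad_w)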
# Recompute y with the updated x: 2.9 ** 2 = 8.41 (shown as 8.410001 due to float32 rounding)
with tf.GradientTape() as tape:
    y = x ** 2

y
<tf.Tensor: shape=(), dtype=float32, numpy=8.410001>
# Repeat the gradient / update cycle; x moves toward the minimum of y = x ** 2 at x = 0
for i in range(30):
    with tf.GradientTape() as tape:
        y = x ** 2
    grad = tape.gradient(y, x)
    optimizer.apply_gradients([(grad, x)])
    print(x.numpy(), y.numpy())
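The same loop pattern (compute the loss under a tape, take the gradient, apply it with the optimizer) extends directly to several variables. Below is a minimal sketch on a made-up toy problem, fitting w and b to points drawn from the line y = 3x + 2; all of the names and values here are illustrative assumptions, not part of the text above:

import tensorflow as tf

xs = tf.constant([0.0, 1.0, 2.0, 3.0])
ys = 3.0 * xs + 2.0

w = tf.Variable(0.0)
b = tf.Variable(0.0)
opt = tf.keras.optimizers.Adam(learning_rate=0.1)

for step in range(300):
    with tf.GradientTape() as tape:
        pred = w * xs + b
        loss = tf.reduce_mean((pred - ys) ** 2)  # mean squared error
    grads = tape.gradient(loss, [w, b])
    opt.apply_gradients(zip(grads, [w, b]))

print(w.numpy(), b.numpy())  # should end up close to 3.0 and 2.0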