In dp_optimizer_keras.py, set the value of unconnected_gradients to 'zero'.

PiperOrigin-RevId: 435759367
Author: Steve Chien (2022-03-18 16:09:59 -07:00), committed by A. Unique TensorFlower
parent d21e492be6
commit 70ab071e23

@@ -256,7 +256,8 @@ def make_keras_optimizer_class(cls):
       # Compute the per-microbatch losses using helpful jacobian method.
       with tf.keras.backend.name_scope(self._name + '/gradients'):
-        jacobian = tape.jacobian(microbatch_losses, var_list)
+        jacobian = tape.jacobian(
+            microbatch_losses, var_list, unconnected_gradients='zero')
         # Clip gradients to given l2_norm_clip.
         def clip_gradients(g):
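
For context, a brief standalone sketch of the tf.GradientTape.jacobian behavior this change addresses (not code from this repository; the variable names below are hypothetical): by default, the jacobian entry for a source variable that is unconnected to the target comes back as None, which downstream per-microbatch clipping cannot handle. Passing unconnected_gradients='zero' returns zero tensors of the correct shape instead.

    import tensorflow as tf

    x = tf.Variable(2.0)
    unused = tf.Variable(3.0)  # hypothetical variable never used by the losses

    with tf.GradientTape() as tape:
      # Per-microbatch losses; 'unused' does not participate, so it is
      # unconnected to the target.
      losses = tf.stack([x * x, 2.0 * x])

    # Default ('none'): the entry for 'unused' would be None.
    # jac = tape.jacobian(losses, [x, unused])  # -> [<(2,) tensor>, None]

    # With 'zero', unconnected entries become zero tensors of the right
    # shape, so clipping and noising can proceed uniformly over all variables.
    jac = tape.jacobian(losses, [x, unused], unconnected_gradients='zero')
    print(jac[1])  # tf.Tensor([0. 0.], shape=(2,), dtype=float32)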