In dp_optimizer_keras.py, set the default value of unconnected_gradients to 'zero' in the tape.jacobian call.
PiperOrigin-RevId: 435759367
parent d21e492be6
commit 70ab071e23
1 changed file with 2 additions and 1 deletion
@@ -256,7 +256,8 @@ def make_keras_optimizer_class(cls):
 
       # Compute the per-microbatch losses using helpful jacobian method.
       with tf.keras.backend.name_scope(self._name + '/gradients'):
-        jacobian = tape.jacobian(microbatch_losses, var_list)
+        jacobian = tape.jacobian(
+            microbatch_losses, var_list, unconnected_gradients='zero')
 
         # Clip gradients to given l2_norm_clip.
         def clip_gradients(g):
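
For illustration, a minimal standalone sketch (not part of the patch; the toy variables here are hypothetical) of why this option matters: by default, tape.jacobian returns None for any variable that is not connected to the target, which downstream per-microbatch clipping code cannot handle, whereas unconnected_gradients='zero' yields zero tensors of the expected shape.

import tensorflow as tf

# Hypothetical toy variables: `used` feeds the loss, `unused` does not.
used = tf.Variable([1.0, 2.0])
unused = tf.Variable([3.0])

with tf.GradientTape() as tape:
  # Two per-microbatch losses, both depending only on `used`.
  microbatch_losses = tf.stack(
      [tf.reduce_sum(used), tf.reduce_sum(used * used)])

# With the default (unconnected_gradients='none'), the entry for `unused`
# would be None. With 'zero' it is a zero tensor of shape
# [num_microbatches] + unused.shape, so clipping code can consume it.
jacobian = tape.jacobian(
    microbatch_losses, [used, unused], unconnected_gradients='zero')
print(jacobian[1])  # tf.Tensor([[0.], [0.]], shape=(2, 1), dtype=float32)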