From 70ab071e2332694efaa7ad88240832f5b778d55c Mon Sep 17 00:00:00 2001
From: Steve Chien
Date: Fri, 18 Mar 2022 16:09:59 -0700
Subject: [PATCH] In dp_optimizer_keras.py, set the default value of
 unconnected gradients to be 'zero'.

PiperOrigin-RevId: 435759367
---
 tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py b/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py
index 6ecbb31..6d53a76 100644
--- a/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py
+++ b/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py
@@ -256,7 +256,8 @@ def make_keras_optimizer_class(cls):
 
       # Compute the per-microbatch losses using helpful jacobian method.
       with tf.keras.backend.name_scope(self._name + '/gradients'):
-        jacobian = tape.jacobian(microbatch_losses, var_list)
+        jacobian = tape.jacobian(
+            microbatch_losses, var_list, unconnected_gradients='zero')
 
         # Clip gradients to given l2_norm_clip.
         def clip_gradients(g):
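
Note: the minimal sketch below (not part of the patch; the variables
"used" and "unused" are illustrative names, not from the patched file)
shows the tf.GradientTape.jacobian behavior this change relies on. By
default, jacobian returns None for any variable the target does not
depend on, which the downstream per-microbatch clip-and-noise code
cannot handle; with unconnected_gradients='zero', such variables get
zero-filled jacobian entries instead.

    import tensorflow as tf

    used = tf.Variable([1.0, 2.0])   # participates in the loss
    unused = tf.Variable([3.0])      # never touched by the loss

    with tf.GradientTape() as tape:
      # Two "microbatch" losses, each depending only on `used`.
      microbatch_losses = tf.stack(
          [tf.reduce_sum(used), tf.reduce_sum(used ** 2)])

    # tape.jacobian(microbatch_losses, [used, unused]) would return
    # [<Tensor>, None] by default; the None entry breaks code that
    # iterates over the jacobian to clip and noise per-microbatch grads.
    jacobian = tape.jacobian(
        microbatch_losses, [used, unused], unconnected_gradients='zero')
    print(jacobian[1])  # zeros of shape (2, 1) instead of None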