From 328795aa3665288d526b46b388fc440758587702 Mon Sep 17 00:00:00 2001
From: Shuang Song
Date: Thu, 14 Jul 2022 12:14:23 -0700
Subject: [PATCH] Add assert that the training is private for TF1 vectorized
 optimizer.

In Keras training in TF 2.0+, compute_gradients() is not called but
apply_gradients() is. Without a call to compute_gradients(), the DP
gradient is never computed and a normal (non-private) gradient is used
instead.

PiperOrigin-RevId: 461021412
---
 .../optimizers/dp_optimizer_vectorized.py       | 13 ++++++++++++
 .../optimizers/dp_optimizer_vectorized_test.py  | 20 +++++++++++++++++++
 2 files changed, 33 insertions(+)

diff --git a/tensorflow_privacy/privacy/optimizers/dp_optimizer_vectorized.py b/tensorflow_privacy/privacy/optimizers/dp_optimizer_vectorized.py
index 1a6bb8c..68bcf31 100644
--- a/tensorflow_privacy/privacy/optimizers/dp_optimizer_vectorized.py
+++ b/tensorflow_privacy/privacy/optimizers/dp_optimizer_vectorized.py
@@ -103,6 +103,7 @@ def make_vectorized_optimizer_class(cls):
       self._l2_norm_clip = l2_norm_clip
       self._noise_multiplier = noise_multiplier
       self._num_microbatches = num_microbatches
+      self._was_compute_gradients_called = False
 
     def compute_gradients(self,
                           loss,
@@ -113,6 +114,7 @@ def make_vectorized_optimizer_class(cls):
                           grad_loss=None,
                           gradient_tape=None):
       """DP-SGD version of base class method."""
+      self._was_compute_gradients_called = True
       if callable(loss):
         # TF is running in Eager mode
         raise NotImplementedError('Vectorized optimizer unavailable for TF2.')
@@ -175,6 +177,17 @@ def make_vectorized_optimizer_class(cls):
 
       return list(zip(final_grads, var_list))
 
+    def apply_gradients(self, grads_and_vars, global_step=None, name=None):
+      # pylint: disable=g-doc-args, g-doc-return-or-yield
+      """DP-SGD version of base class method."""
+      assert self._was_compute_gradients_called, (
+          'compute_gradients() on the differentially private optimizer was '
+          'not called, which means the training is not differentially '
+          'private. This happens, for example, in Keras training in '
+          'TensorFlow 2.0+.')
+      return super(DPOptimizerClass, self).apply_gradients(
+          grads_and_vars=grads_and_vars, global_step=global_step, name=name)
+
   return DPOptimizerClass
 
 
diff --git a/tensorflow_privacy/privacy/optimizers/dp_optimizer_vectorized_test.py b/tensorflow_privacy/privacy/optimizers/dp_optimizer_vectorized_test.py
index 17119f7..de3cc54 100644
--- a/tensorflow_privacy/privacy/optimizers/dp_optimizer_vectorized_test.py
+++ b/tensorflow_privacy/privacy/optimizers/dp_optimizer_vectorized_test.py
@@ -197,6 +197,26 @@ class DPOptimizerTest(tf.test.TestCase, parameterized.TestCase):
     # Test standard deviation is close to l2_norm_clip * noise_multiplier.
     self.assertNear(np.std(grads), 2.0 * 4.0, 0.5)
 
+  @parameterized.named_parameters(('DPGradientDescent', VectorizedDPSGD),
+                                  ('DPAdagrad', VectorizedDPAdagrad),
+                                  ('DPAdam', VectorizedDPAdam))
+  def testAssertOnNoCallOfComputeGradients(self, cls):
+    opt = cls(
+        l2_norm_clip=4.0,
+        noise_multiplier=2.0,
+        num_microbatches=1,
+        learning_rate=2.0)
+
+    with self.assertRaises(AssertionError):
+      grads_and_vars = tf.Variable([0.0])
+      opt.apply_gradients(grads_and_vars)
+
+    # Expect no AssertionError if compute_gradients() is called first.
+    var0 = tf.Variable([0.0])
+    data0 = tf.Variable([[0.0]])
+    grads_and_vars = opt.compute_gradients(self._loss(data0, var0), [var0])
+    opt.apply_gradients(grads_and_vars)
+
 
 if __name__ == '__main__':
   tf.test.main()
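
Note: a minimal sketch of the failure mode this patch guards against. The
optimizer class and constructor arguments come from the files above; the
graph setup, variable names, and loss are illustrative assumptions, not
part of the patch.

    # Sketch of the private vs. non-private call paths (assumed TF1-style
    # graph setup; the vectorized optimizers are TF1-only).
    import tensorflow.compat.v1 as tf

    from tensorflow_privacy.privacy.optimizers import dp_optimizer_vectorized

    tf.disable_eager_execution()

    var0 = tf.Variable([0.0])     # illustrative model weight
    data0 = tf.constant([[1.0]])  # illustrative batch
    loss = tf.reduce_mean(tf.squared_difference(data0, var0), axis=1)

    opt = dp_optimizer_vectorized.VectorizedDPSGD(
        l2_norm_clip=4.0,
        noise_multiplier=2.0,
        num_microbatches=1,
        learning_rate=2.0)

    # Private path: compute_gradients() clips and noises per-microbatch
    # gradients before apply_gradients() updates the weights.
    train_op = opt.apply_gradients(opt.compute_gradients(loss, [var0]))

    # Non-private path that this patch now rejects: Keras in TF 2.0+
    # computes plain gradients itself and calls apply_gradients()
    # directly, so the assert fires instead of silently training without
    # differential privacy.
    # opt.apply_gradients([(tf.ones_like(var0), var0)])  # AssertionError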