From 715fd1a670a4045bda39347ed7439397224c4b63 Mon Sep 17 00:00:00 2001 From: Chen Qian Date: Thu, 8 Sep 2022 14:56:19 -0700 Subject: [PATCH] Code changes to get ready for an incoming Keras optimizer migration. Because the code subclasses the legacy Keras optimizer, we should explicitly use the legacy optimizer. PiperOrigin-RevId: 473092233 --- .../privacy/optimizers/dp_optimizer_keras.py | 15 ++++++++------- .../privacy/optimizers/dp_optimizer_test.py | 4 ++-- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py b/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py index d64c772..6d7bd7e 100644 --- a/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py +++ b/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py @@ -20,11 +20,11 @@ from tensorflow_privacy.privacy.dp_query import gaussian_query def make_keras_optimizer_class(cls): - """Given a subclass of `tf.keras.optimizers.Optimizer`, returns a DP-SGD subclass of it. + """Given a subclass of `tf.keras.optimizers.legacy.Optimizer`, returns a DP-SGD subclass of it. Args: cls: Class from which to derive a DP subclass. Should be a subclass of - `tf.keras.optimizers.Optimizer`. + `tf.keras.optimizers.legacy.Optimizer`. Returns: A DP-SGD subclass of `cls`. @@ -123,11 +123,11 @@ def make_keras_optimizer_class(cls): called `steps_per_epoch` times per epoch, and one call only returns `one_step_batch_size` (instead of `effective_batch_size`) examples now. """.format( - base_class='tf.keras.optimizers.' + cls.__name__, + base_class='tf.keras.optimizers.legacy.' + cls.__name__, short_base_class=cls.__name__, dp_keras_class='DPKeras' + cls.__name__) - # The class tf.keras.optimizers.Optimizer has two methods to compute + # The class tf.keras.optimizers.legacy.Optimizer has two methods to compute # gradients, `_compute_gradients` and `get_gradients`. 
The first works # with eager execution, while the second runs in graph mode and is used # by canned estimators. @@ -371,6 +371,7 @@ def make_keras_optimizer_class(cls): DPKerasAdagradOptimizer = make_keras_optimizer_class( - tf.keras.optimizers.Adagrad) -DPKerasAdamOptimizer = make_keras_optimizer_class(tf.keras.optimizers.Adam) -DPKerasSGDOptimizer = make_keras_optimizer_class(tf.keras.optimizers.SGD) + tf.keras.optimizers.legacy.Adagrad) +DPKerasAdamOptimizer = make_keras_optimizer_class( + tf.keras.optimizers.legacy.Adam) +DPKerasSGDOptimizer = make_keras_optimizer_class(tf.keras.optimizers.legacy.SGD) diff --git a/tensorflow_privacy/privacy/optimizers/dp_optimizer_test.py b/tensorflow_privacy/privacy/optimizers/dp_optimizer_test.py index 6699fb5..6230ea1 100644 --- a/tensorflow_privacy/privacy/optimizers/dp_optimizer_test.py +++ b/tensorflow_privacy/privacy/optimizers/dp_optimizer_test.py @@ -369,12 +369,12 @@ class DPOptimizerTest(tf.test.TestCase, parameterized.TestCase): def testWriteOutAndReloadAdam(self): optimizer_class = dp_optimizer.make_gaussian_optimizer_class( - tf.keras.optimizers.Adam) + tf.keras.optimizers.legacy.Adam) self._test_write_out_and_reload(optimizer_class) def testWriteOutAndReloadSGD(self): optimizer_class = dp_optimizer.make_gaussian_optimizer_class( - tf.keras.optimizers.SGD) + tf.keras.optimizers.legacy.SGD) self._test_write_out_and_reload(optimizer_class)