From 49db04e3561638fc02795edb5774d322cdd1d7d1 Mon Sep 17 00:00:00 2001
From: "A. Unique TensorFlower"
Date: Wed, 1 Dec 2021 15:06:01 -0800
Subject: [PATCH] Overwrite the get_config method for dp_optimizer to reflect
 the actual parameters required (noise_multiplier, l2_norm_clip)

PiperOrigin-RevId: 413520270
---
 .../privacy/optimizers/dp_optimizer_keras.py | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py b/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py
index 2fe2a7f..1ba11b9 100644
--- a/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py
+++ b/tensorflow_privacy/privacy/optimizers/dp_optimizer_keras.py
@@ -332,6 +332,25 @@ def make_keras_optimizer_class(cls):
 
       return final_grads
 
+    def get_config(self):
+      """Returns the config of the optimizer.
+
+      An optimizer config is a Python dictionary (serializable)
+      containing the configuration of an optimizer.
+      The same optimizer can be reinstantiated later
+      (without any saved state) from this configuration.
+
+      Returns:
+          Python dictionary.
+      """
+      config = super(DPOptimizerClass, self).get_config()
+      config.update({
+          'l2_norm_clip': self._l2_norm_clip,
+          'noise_multiplier': self._noise_multiplier,
+          'num_microbatches': self._num_microbatches,
+      })
+      return config
+
     def apply_gradients(self, grads_and_vars, global_step=None, name=None):
       """DP-SGD version of base class method."""
       assert self._was_dp_gradients_called, (
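
Note (not part of the patch): a minimal sketch of the round trip this change
enables, assuming TF 2.x and tensorflow_privacy at this revision. It uses
DPKerasSGDOptimizer, which this module builds via
make_keras_optimizer_class(tf.keras.optimizers.SGD); the hyperparameter
values below are illustrative only.

    # Sketch only: exercises the get_config()/from_config() round trip
    # that this patch makes possible for the DP-specific parameters.
    from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras import (
        DPKerasSGDOptimizer)

    opt = DPKerasSGDOptimizer(
        l2_norm_clip=1.0,      # clipping norm applied to microbatch gradients
        noise_multiplier=1.1,  # noise stddev = noise_multiplier * l2_norm_clip
        num_microbatches=32,   # each minibatch is split into 32 microbatches
        learning_rate=0.05)    # forwarded to the underlying Keras SGD

    config = opt.get_config()
    # With this patch the DP parameters appear in the config...
    assert config['l2_norm_clip'] == 1.0
    assert config['noise_multiplier'] == 1.1
    assert config['num_microbatches'] == 32

    # ...so an equivalent optimizer (without any saved slot/iteration state)
    # can be rebuilt from the config alone, e.g. when a compiled model is
    # serialized and reloaded.
    restored = DPKerasSGDOptimizer.from_config(config)

Only constructor-level hyperparameters go into the config; internal state such
as the Gaussian sum query is reconstructed from these values in __init__, so
it does not need to be serialized.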