forked from 626_privacy/tensorflow_privacy
Override the get_config method of dp_optimizer to reflect the actual parameters required (l2_norm_clip, noise_multiplier, num_microbatches)
PiperOrigin-RevId: 413520270
This commit is contained in:
parent 290ecf7797
commit 49db04e356
1 changed file with 19 additions and 0 deletions
@@ -332,6 +332,25 @@ def make_keras_optimizer_class(cls):
       return final_grads

+    def get_config(self):
+      """Returns the config of the optimizer.
+
+      An optimizer config is a Python dictionary (serializable)
+      containing the configuration of an optimizer.
+      The same optimizer can be reinstantiated later
+      (without any saved state) from this configuration.
+
+      Returns:
+          Python dictionary.
+      """
+      config = super(DPOptimizerClass, self).get_config()
+      config.update({
+          'l2_norm_clip': self._l2_norm_clip,
+          'noise_multiplier': self._noise_multiplier,
+          'num_microbatches': self._num_microbatches,
+      })
+      return config
+
     def apply_gradients(self, grads_and_vars, global_step=None, name=None):
       """DP-SGD version of base class method."""
       assert self._was_dp_gradients_called, (
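With get_config defined, the DP-specific hyperparameters survive Keras optimizer serialization. A minimal round-trip sketch, assuming this commit targets the Keras optimizers in dp_optimizer_keras (so the DPKerasSGDOptimizer subclass produced by make_keras_optimizer_class is available) and using illustrative hyperparameter values:

    from tensorflow_privacy.privacy.optimizers.dp_optimizer_keras import (
        DPKerasSGDOptimizer)

    # Illustrative values; any settings valid for the optimizer work here.
    opt = DPKerasSGDOptimizer(
        l2_norm_clip=1.0,        # serialized by the new get_config
        noise_multiplier=1.1,    # serialized by the new get_config
        num_microbatches=32,     # serialized by the new get_config
        learning_rate=0.01)      # inherited SGD argument

    config = opt.get_config()
    # Before this commit, only the base-class config was returned, so the
    # three DP keys were missing and reinstantiation would fail on the
    # required constructor arguments; now the round trip preserves them.
    restored = DPKerasSGDOptimizer.from_config(config)
    assert restored.get_config()['noise_multiplier'] == 1.1

This works because from_config is the stock Keras classmethod that effectively calls cls(**config), which is why get_config must report exactly the constructor parameters named in the commit message.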