Ensure that `apply_gradients` in `dp_optimizer.py` passes the intended arguments to the superclass call.
PiperOrigin-RevId: 417654563
This commit is contained in:
parent
347b99d412
commit
c6576f60c4
1 changed file with 2 additions and 2 deletions
|
@@ -267,8 +267,8 @@ def make_optimizer_class(cls):
           ' called. Which means that the training is not differentially '
           'private. It happens for example in Keras training in TensorFlow '
           '2.0+.')
-      return super(DPOptimizerClass,
-                   self).apply_gradients(grads_and_vars, global_step, name)
+      return super(DPOptimizerClass, self).apply_gradients(
+          grads_and_vars=grads_and_vars, global_step=global_step, name=name)

   return DPOptimizerClass
|
Loading…
Reference in a new issue