Ensure that apply_gradients in dp_optimizer.py passes the intended arguments in its calls to the superclass.

PiperOrigin-RevId: 417654563
Author: Steve Chien (2021-12-21 11:05:43 -08:00), committed by A. Unique TensorFlower
parent 347b99d412
commit c6576f60c4

```diff
@@ -267,8 +267,8 @@ def make_optimizer_class(cls):
           ' called. Which means that the training is not differentially '
           'private. It happens for example in Keras training in TensorFlow '
           '2.0+.')
-      return super(DPOptimizerClass,
-                   self).apply_gradients(grads_and_vars, global_step, name)
+      return super(DPOptimizerClass, self).apply_gradients(
+          grads_and_vars=grads_and_vars, global_step=global_step, name=name)
   return DPOptimizerClass
```
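
Why keyword arguments matter here: the base class wrapped by make_optimizer_class is not fixed, and different optimizer base classes order their optional parameters differently. For example, the TF1 tf.compat.v1.train.Optimizer.apply_gradients signature is (grads_and_vars, global_step=None, name=None), while TF2's Keras OptimizerV2.apply_gradients has no global_step parameter at all. Passing global_step and name positionally can therefore bind values to the wrong parameters without raising any error. Below is a minimal sketch of that pitfall; the classes are hypothetical stand-ins, not TF Privacy's real hierarchy:

```python
# Hypothetical base class whose optional parameters are ordered
# differently from what the subclass assumes.
class BaseOptimizer:
  def apply_gradients(self, grads_and_vars, name=None, global_step=None):
    return {'name': name, 'global_step': global_step}


class DPOptimizer(BaseOptimizer):
  def apply_gradients(self, grads_and_vars, global_step=None, name=None):
    # Positional call: global_step silently lands in the base class's
    # `name` slot, and name lands in `global_step`.
    positional = super().apply_gradients(grads_and_vars, global_step, name)

    # Keyword call (the pattern the commit adopts): each value binds to
    # the parameter it was meant for, regardless of ordering.
    keyword = super().apply_gradients(
        grads_and_vars=grads_and_vars, global_step=global_step, name=name)
    return positional, keyword


positional, keyword = DPOptimizer().apply_gradients(
    [], global_step=7, name='train_op')
print(positional)  # {'name': 7, 'global_step': 'train_op'}  -- misbound
print(keyword)     # {'name': 'train_op', 'global_step': 7}  -- as intended
```

A further benefit of keyword binding: if the superclass signature lacks one of the parameters entirely (as with OptimizerV2's missing global_step), the call fails loudly with a TypeError instead of silently training with the wrong values.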