From c6576f60c46640a52fa001faa075b328d0cb4a31 Mon Sep 17 00:00:00 2001
From: Steve Chien
Date: Tue, 21 Dec 2021 11:05:43 -0800
Subject: [PATCH] Ensure that apply_gradients in dp_optimizer.py is using
 intended arguments for calls to superclass.

PiperOrigin-RevId: 417654563
---
 tensorflow_privacy/privacy/optimizers/dp_optimizer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tensorflow_privacy/privacy/optimizers/dp_optimizer.py b/tensorflow_privacy/privacy/optimizers/dp_optimizer.py
index 1d9c8cc..336c1d2 100644
--- a/tensorflow_privacy/privacy/optimizers/dp_optimizer.py
+++ b/tensorflow_privacy/privacy/optimizers/dp_optimizer.py
@@ -267,8 +267,8 @@ def make_optimizer_class(cls):
           ' called. Which means that the training is not differentially '
           'private. It happens for example in Keras training in TensorFlow '
           '2.0+.')
-      return super(DPOptimizerClass,
-                   self).apply_gradients(grads_and_vars, global_step, name)
+      return super(DPOptimizerClass, self).apply_gradients(
+          grads_and_vars=grads_and_vars, global_step=global_step, name=name)

   return DPOptimizerClass
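
For illustration, below is a minimal, self-contained sketch of the forwarding
pattern the patch adopts. It is not TF Privacy's actual code: `BaseOptimizer`
is a hypothetical stand-in for the `cls` argument of make_optimizer_class, with
a signature mirroring tf.compat.v1.train.Optimizer.apply_gradients. The point
is that forwarding by keyword binds each value to the superclass parameter of
the same name, whereas positional forwarding depends on the superclass keeping
its parameters in exactly that order.

    class BaseOptimizer:
      # Hypothetical parent; signature mirrors
      # tf.compat.v1.train.Optimizer.apply_gradients.
      def apply_gradients(self, grads_and_vars, global_step=None, name=None):
        return {'global_step': global_step, 'name': name}

    def make_optimizer_class(cls):
      class DPOptimizerClass(cls):
        def apply_gradients(self, grads_and_vars, global_step=None, name=None):
          # Forward by keyword, as in the patch. A positional call,
          #   super(...).apply_gradients(grads_and_vars, global_step, name),
          # would misbind silently if the superclass ever ordered or named
          # its parameters differently; keyword forwarding either binds the
          # intended parameters or fails loudly with a TypeError.
          return super(DPOptimizerClass, self).apply_gradients(
              grads_and_vars=grads_and_vars, global_step=global_step, name=name)

      return DPOptimizerClass

    DPOpt = make_optimizer_class(BaseOptimizer)
    print(DPOpt().apply_gradients([], global_step=7, name='train_step'))
    # -> {'global_step': 7, 'name': 'train_step'}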