forked from 626_privacy/tensorflow_privacy
more lint
This commit is contained in:
parent 33c3f058ac
commit f06443d50e
1 changed file with 41 additions and 29 deletions
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Bolton Optimizer for bolton method"""
+"""Bolton Optimizer for bolton method."""
 
 from __future__ import absolute_import
 from __future__ import division
@@ -28,8 +28,10 @@ _accepted_distributions = ['laplace'] # implemented distributions for noising
 class GammaBetaDecreasingStep(
     optimizer_v2.learning_rate_schedule.LearningRateSchedule):
   """Computes LR as minimum of 1/beta and 1/(gamma * step) at each step.
-  A required step for privacy guarantees.
+
+  This is a required step for privacy guarantees.
   """
+
   def __init__(self):
     self.is_init = False
     self.beta = None
@@ -37,8 +39,10 @@ class GammaBetaDecreasingStep(
 
   def __call__(self, step):
     """Computes and returns the learning rate.
+
     Args:
       step: the current iteration number
+
     Returns:
       decayed learning rate to minimum of 1/beta and 1/(gamma * step) as per
       the Bolton privacy requirements.
@@ -107,6 +111,7 @@ class Bolton(optimizer_v2.OptimizerV2):
   Bolt-on Differential Privacy for Scalable Stochastic Gradient
   Descent-based Analytics by Xi Wu et. al.
   """
+
   def __init__(self,  # pylint: disable=super-init-not-called
                optimizer,
                loss,
@@ -118,11 +123,12 @@ class Bolton(optimizer_v2.OptimizerV2):
       optimizer: Optimizer_v2 or subclass to be used as the optimizer
                  (wrapped).
       loss: StrongConvexLoss function that the model is being compiled with.
       dtype: dtype
     """
+
     if not isinstance(loss, StrongConvexMixin):
-      raise ValueError("loss function must be a Strongly Convex and therefore "
-                       "extend the StrongConvexMixin.")
+      raise ValueError('loss function must be a Strongly Convex and therefore '
+                       'extend the StrongConvexMixin.')
     self._private_attributes = ['_internal_optimizer',
                                 'dtype',
                                 'noise_distribution',
@@ -154,6 +160,9 @@ class Bolton(optimizer_v2.OptimizerV2):
     Args:
       force: True to normalize regardless of previous weight values.
         False to check if weights > R-ball and only normalize then.
+
+    Raises:
+      Exception:
     """
     if not self._is_init:
       raise Exception('This method must be called from within the optimizer\'s '
@@ -171,7 +180,7 @@ class Bolton(optimizer_v2.OptimizerV2):
     )
 
   def get_noise(self, input_dim, output_dim):
-    """Sample noise to be added to weights for privacy guarantee
+    """Sample noise to be added to weights for privacy guarantee.
 
     Args:
       input_dim: the input dimensionality for the weights
@@ -179,6 +188,9 @@ class Bolton(optimizer_v2.OptimizerV2):
 
     Returns:
       Noise in shape of layer's weights to be added to the weights.
+
+    Raises:
+      Exception:
     """
     if not self._is_init:
       raise Exception('This method must be called from within the optimizer\'s '
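For context on the schedule documented in the hunks above: per its docstring, GammaBetaDecreasingStep yields the minimum of 1/beta and 1/(gamma * step) at each step. A minimal sketch of that rule, written as a plain Python function for illustration (the standalone function name is an assumption; the real class extends optimizer_v2.learning_rate_schedule.LearningRateSchedule and holds beta and gamma as attributes):

def gamma_beta_decreasing_step(step, beta, gamma):
  # Decayed learning rate per the Bolton privacy requirements: constant at
  # 1/beta until 1/(gamma * step) falls below it, then decaying as
  # 1/(gamma * step). Hypothetical helper, not this file's API.
  return min(1.0 / beta, 1.0 / (gamma * step))

# Example: with beta=2.0 and gamma=0.5 the rate is 0.5 for steps 1-4,
# then 1/(0.5 * 5) = 0.4 at step 5 and shrinking thereafter.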
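Likewise, get_noise's docstring says it samples noise in the shape of a layer's weights, and _accepted_distributions marks 'laplace' as the only implemented distribution. A hedged sketch of that idea (the function name and caller-supplied scale are assumptions; the real method derives the noise magnitude from its privacy parameters):

import numpy as np

def sample_laplace_noise(input_dim, output_dim, scale):
  # Illustrative only: Laplace noise shaped like an
  # (input_dim, output_dim) weight matrix, to be added to the weights.
  # Bolton computes the scale from its privacy guarantee; a fixed
  # caller-supplied scale is a stand-in here.
  return np.random.laplace(loc=0.0, scale=scale,
                           size=(input_dim, output_dim))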