more lint

This commit is contained in:
npapernot 2019-07-29 21:43:19 +00:00
parent 33c3f058ac
commit f06443d50e

View file

@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
"""Bolton Optimizer for bolton method""" """Bolton Optimizer for bolton method."""
from __future__ import absolute_import from __future__ import absolute_import
from __future__ import division from __future__ import division
@ -28,8 +28,10 @@ _accepted_distributions = ['laplace'] # implemented distributions for noising
class GammaBetaDecreasingStep( class GammaBetaDecreasingStep(
optimizer_v2.learning_rate_schedule.LearningRateSchedule): optimizer_v2.learning_rate_schedule.LearningRateSchedule):
"""Computes LR as minimum of 1/beta and 1/(gamma * step) at each step. """Computes LR as minimum of 1/beta and 1/(gamma * step) at each step.
A required step for privacy guarantees.
This is a required step for privacy guarantees.
""" """
def __init__(self): def __init__(self):
self.is_init = False self.is_init = False
self.beta = None self.beta = None
@ -37,8 +39,10 @@ class GammaBetaDecreasingStep(
def __call__(self, step): def __call__(self, step):
"""Computes and returns the learning rate. """Computes and returns the learning rate.
Args: Args:
step: the current iteration number step: the current iteration number
Returns: Returns:
decayed learning rate to minimum of 1/beta and 1/(gamma * step) as per decayed learning rate to minimum of 1/beta and 1/(gamma * step) as per
the Bolton privacy requirements. the Bolton privacy requirements.
@ -107,6 +111,7 @@ class Bolton(optimizer_v2.OptimizerV2):
Bolt-on Differential Privacy for Scalable Stochastic Gradient Bolt-on Differential Privacy for Scalable Stochastic Gradient
Descent-based Analytics by Xi Wu et al. Descent-based Analytics by Xi Wu et al.
""" """
def __init__(self, # pylint: disable=super-init-not-called def __init__(self, # pylint: disable=super-init-not-called
optimizer, optimizer,
loss, loss,
@ -118,11 +123,12 @@ class Bolton(optimizer_v2.OptimizerV2):
optimizer: Optimizer_v2 or subclass to be used as the optimizer optimizer: Optimizer_v2 or subclass to be used as the optimizer
(wrapped). (wrapped).
loss: StrongConvexLoss function that the model is being compiled with. loss: StrongConvexLoss function that the model is being compiled with.
dtype: dtype
""" """
if not isinstance(loss, StrongConvexMixin): if not isinstance(loss, StrongConvexMixin):
raise ValueError("loss function must be a Strongly Convex and therefore " raise ValueError('loss function must be a Strongly Convex and therefore '
"extend the StrongConvexMixin.") 'extend the StrongConvexMixin.')
self._private_attributes = ['_internal_optimizer', self._private_attributes = ['_internal_optimizer',
'dtype', 'dtype',
'noise_distribution', 'noise_distribution',
@ -154,6 +160,9 @@ class Bolton(optimizer_v2.OptimizerV2):
Args: Args:
force: True to normalize regardless of previous weight values. force: True to normalize regardless of previous weight values.
False to check if weights > R-ball and only normalize then. False to check if weights > R-ball and only normalize then.
Raises:
Exception:
""" """
if not self._is_init: if not self._is_init:
raise Exception('This method must be called from within the optimizer\'s ' raise Exception('This method must be called from within the optimizer\'s '
@ -171,7 +180,7 @@ class Bolton(optimizer_v2.OptimizerV2):
) )
def get_noise(self, input_dim, output_dim): def get_noise(self, input_dim, output_dim):
"""Sample noise to be added to weights for privacy guarantee """Sample noise to be added to weights for privacy guarantee.
Args: Args:
input_dim: the input dimensionality for the weights input_dim: the input dimensionality for the weights
@ -179,6 +188,9 @@ class Bolton(optimizer_v2.OptimizerV2):
Returns: Returns:
Noise in shape of layer's weights to be added to the weights. Noise in shape of layer's weights to be added to the weights.
Raises:
Exception:
""" """
if not self._is_init: if not self._is_init:
raise Exception('This method must be called from within the optimizer\'s ' raise Exception('This method must be called from within the optimizer\'s '