From 0082c9ba7682ff2bb389dcfe800e973652642afd Mon Sep 17 00:00:00 2001
From: Christopher Choquette Choo
Date: Mon, 22 Jul 2019 11:27:53 -0400
Subject: [PATCH 1/3] Minor doc change and adding README file explaining Bolton
 Method.

---
 privacy/bolton/README.md | 56 ++++++++++++++++++++++++++++++++++++++++
 privacy/bolton/models.py |  2 +-
 2 files changed, 57 insertions(+), 1 deletion(-)
 create mode 100644 privacy/bolton/README.md

diff --git a/privacy/bolton/README.md b/privacy/bolton/README.md
new file mode 100644
index 0000000..ad3c141
--- /dev/null
+++ b/privacy/bolton/README.md
@@ -0,0 +1,56 @@
+# Bolton Module
+
+This module contains source code for the Bolton method. This method is a subset
+of methods used in ensuring privacy in machine learning that leverages
+additional assumptions to provide a new way of approaching the privacy
+guarantees.
+
+## Bolton Description
+
+This method uses 4 key steps to achieve privacy guarantees:
+ 1. Adds noise to weights after training (output perturbation).
+ 2. Projects weights to R after each batch.
+ 3. Limits the learning rate.
+ 4. Uses a strongly convex loss function (see compile).
+
+For more details on the strong convexity requirements, see:
+Bolt-on Differential Privacy for Scalable Stochastic Gradient
+Descent-based Analytics by Xi Wu et al.
+
+### Why Bolton?
+
+The major difference for the Bolton method is that it injects noise post model
+convergence, rather than noising gradients or weights during training. This
+approach requires some additional constraints listed in the Description.
+Should the use-case and model satisfy these constraints, this is another
+approach that can be trained to maximize utility while maintaining privacy.
+The paper describes in detail the advantages and disadvantages of this approach
+and its results compared to some other methods, namely noising at each iteration
+and no noising.
+
+## Tutorials
+
+This module has a tutorial that can be found in the root tutorials directory,
+under bolton_tutorial.py.
+
+## Contribution
+
+This module was initially contributed by Georgian Partners with the hope of
+growing the tensorflow/privacy library. There are several rich use cases for
+epsilon-delta differential privacy in machine learning, some of which can be explored here:
+https://medium.com/apache-mxnet/epsilon-differential-privacy-for-machine-learning-using-mxnet-a4270fe3865e
+https://arxiv.org/pdf/1811.04911.pdf
+
+## Contacts
+
+In addition to the maintainers of tensorflow/privacy listed in the root
+README.md, please feel free to contact members of Georgian Partners. In
+particular,
+
+* Georgian Partners (@georgianpartners)
+* Ji Chao Zhang (@Jichaogp)
+* Christopher Choquette (@cchoquette)
+
+## Copyright
+
+Copyright 2019 - Google LLC
diff --git a/privacy/bolton/models.py b/privacy/bolton/models.py
index 2c5c08a..634f94c 100644
--- a/privacy/bolton/models.py
+++ b/privacy/bolton/models.py
@@ -38,7 +38,7 @@ class BoltonModel(Model):  # pylint: disable=abstract-method
 
   For more details on the strong convexity requirements, see:
   Bolt-on Differential Privacy for Scalable Stochastic Gradient
-  Descent-based Analytics by Xi Wu et. al.
+  Descent-based Analytics by Xi Wu et al.
   """
 
   def __init__(self,

From 7785436cf3550821c4fab60e0c9517ee6678a90d Mon Sep 17 00:00:00 2001
From: Christopher Choquette Choo
Date: Mon, 22 Jul 2019 11:30:29 -0400
Subject: [PATCH 2/3] Minor changes to README

---
 privacy/bolton/README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/privacy/bolton/README.md b/privacy/bolton/README.md
index ad3c141..aa140e5 100644
--- a/privacy/bolton/README.md
+++ b/privacy/bolton/README.md
@@ -1,6 +1,6 @@
-# Bolton Module
+# Bolton SubPpckage
 
-This module contains source code for the Bolton method. This method is a subset
+This package contains source code for the Bolton method. This method is a subset
 of methods used in ensuring privacy in machine learning that leverages
 additional assumptions to provide a new way of approaching the privacy
 guarantees.
@@ -30,12 +30,12 @@
 
 ## Tutorials
 
-This module has a tutorial that can be found in the root tutorials directory,
+This package has a tutorial that can be found in the root tutorials directory,
 under bolton_tutorial.py.
 
 ## Contribution
 
-This module was initially contributed by Georgian Partners with the hope of
+This package was initially contributed by Georgian Partners with the hope of
 growing the tensorflow/privacy library. There are several rich use cases for
 epsilon-delta differential privacy in machine learning, some of which can be explored here:
 https://medium.com/apache-mxnet/epsilon-differential-privacy-for-machine-learning-using-mxnet-a4270fe3865e

From 968ea70060bc459cfe9426e97d9b69d0719b722c Mon Sep 17 00:00:00 2001
From: Christopher Choquette Choo
Date: Mon, 22 Jul 2019 11:30:55 -0400
Subject: [PATCH 3/3] Fixing typos.

---
 privacy/bolton/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/privacy/bolton/README.md b/privacy/bolton/README.md
index aa140e5..95d6b68 100644
--- a/privacy/bolton/README.md
+++ b/privacy/bolton/README.md
@@ -1,4 +1,4 @@
-# Bolton SubPpckage
+# Bolton Subpackage
 
 This package contains source code for the Bolton method. This method is a subset
 of methods used in ensuring privacy in machine learning that leverages