From fc7504efcaeebb1674f32dd694b057665129f751 Mon Sep 17 00:00:00 2001
From: "A. Unique TensorFlower"
Date: Wed, 1 Sep 2021 19:00:25 -0700
Subject: [PATCH] Slight language adjustments

PiperOrigin-RevId: 394363646
---
 g3doc/tutorials/classification_privacy.ipynb | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/g3doc/tutorials/classification_privacy.ipynb b/g3doc/tutorials/classification_privacy.ipynb
index 71cf3f2..08004cf 100644
--- a/g3doc/tutorials/classification_privacy.ipynb
+++ b/g3doc/tutorials/classification_privacy.ipynb
@@ -77,7 +77,7 @@
     "id": "vsCUvXP0W4j2"
    },
    "source": [
-    "[Differential privacy](https://en.wikipedia.org/wiki/Differential_privacy) (DP) is a framework for measuring the privacy guarantees provided by an algorithm. Through the lens of differential privacy, you can design machine learning algorithms that responsibly train models on private data. Learning with differential privacy provides provable guarantees of privacy, mitigating the risk of exposing sensitive training data in machine learning. Intuitively, a model trained with differential privacy should not be affected by any single training example, or small set of training examples, in its data set. This mitigates the risk of exposing sensitive training data in ML."
+    "[Differential privacy](https://en.wikipedia.org/wiki/Differential_privacy) (DP) is a framework for measuring the privacy guarantees provided by an algorithm. Through the lens of differential privacy, you can design machine learning algorithms that responsibly train models on private data. Learning with differential privacy provides measurable guarantees of privacy, helping to mitigate the risk of exposing sensitive training data in machine learning. Intuitively, a model trained with differential privacy should not be affected by any single training example, or small set of training examples, in its data set. This helps mitigate the risk of exposing sensitive training data in ML."
    ]
   },
   {
@@ -452,6 +452,7 @@
   "colab": {
    "collapsed_sections": [],
    "name": "classification_privacy.ipynb",
+   "provenance": [],
    "toc_visible": true
   },
   "kernelspec": {