diff --git a/tensorflow_privacy/privacy/membership_inference_attack/codelabs/codelab_privacy_risk_score.ipynb b/tensorflow_privacy/privacy/membership_inference_attack/codelabs/codelab_privacy_risk_score.ipynb
deleted file mode 100644
index c271f41..0000000
--- a/tensorflow_privacy/privacy/membership_inference_attack/codelabs/codelab_privacy_risk_score.ipynb
+++ /dev/null
@@ -1,816 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "1eiwVljWpzM7"
- },
- "source": [
- "Copyright 2020 The TensorFlow Authors.\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "cellView": "both",
- "colab": {},
- "colab_type": "code",
- "id": "4rmwPgXeptiS"
- },
- "outputs": [],
- "source": [
- "#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n",
- "# you may not use this file except in compliance with the License.\n",
- "# You may obtain a copy of the License at\n",
- "#\n",
- "# https://www.apache.org/licenses/LICENSE-2.0\n",
- "#\n",
- "# Unless required by applicable law or agreed to in writing, software\n",
- "# distributed under the License is distributed on an \"AS IS\" BASIS,\n",
- "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n",
- "# See the License for the specific language governing permissions and\n",
- "# limitations under the License."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "YM2gRaJMqvMi"
- },
- "source": [
- "# Assess privacy risks with TensorFlow Privacy Membership Inference Attacks"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "9rMuytY7Nn8P"
- },
- "source": [
- "## Overview\n",
- "In this codelab we'll train a simple image classification model on the CIFAR10 dataset, and then run a \"membership inference attack\" against this model to assess whether an attacker is able to \"guess\" if a particular sample was present in the training set."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "FUWqArj_q8vs"
- },
- "source": [
- "## Setup\n",
- "First, set this notebook's runtime to use a GPU, under Runtime > Change runtime type > Hardware accelerator. Then, begin importing the necessary libraries."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {
- "cellView": "form",
- "colab": {},
- "colab_type": "code",
- "id": "Lr1pwHcbralz"
- },
- "outputs": [],
- "source": [
- "#@title Import statements.\n",
- "import numpy as np\n",
- "from typing import Tuple, Text\n",
- "from scipy import special\n",
- "\n",
- "import tensorflow as tf\n",
- "import tensorflow_datasets as tfds\n",
- "\n",
- "# Set verbosity.\n",
- "tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)\n",
- "from warnings import simplefilter\n",
- "from sklearn.exceptions import ConvergenceWarning\n",
- "simplefilter(action=\"ignore\", category=ConvergenceWarning)\n",
- "simplefilter(action=\"ignore\", category=FutureWarning)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "ucw81ar6ru-6"
- },
- "source": [
- "### Install TensorFlow Privacy."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {
- "cellView": "both",
- "colab": {},
- "colab_type": "code",
- "id": "zcqAmiGH90kl"
- },
- "outputs": [],
- "source": [
- "!pip3 install git+https://github.com/tensorflow/privacy\n",
- "\n",
- "from tensorflow_privacy.privacy.membership_inference_attack import membership_inference_attack as mia"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "pBbcG86th_sW"
- },
- "source": [
- "## Train a model"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {
- "cellView": "form",
- "colab": {},
- "colab_type": "code",
- "id": "vCyOWyyhXLib"
- },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Loading the dataset.\n",
- "learning rate 0.020000\n",
- "Model: \"sequential\"\n",
- "_________________________________________________________________\n",
- "Layer (type) Output Shape Param # \n",
- "=================================================================\n",
- "conv2d (Conv2D) (None, 30, 30, 32) 896 \n",
- "_________________________________________________________________\n",
- "max_pooling2d (MaxPooling2D) (None, 15, 15, 32) 0 \n",
- "_________________________________________________________________\n",
- "conv2d_1 (Conv2D) (None, 13, 13, 32) 9248 \n",
- "_________________________________________________________________\n",
- "max_pooling2d_1 (MaxPooling2 (None, 6, 6, 32) 0 \n",
- "_________________________________________________________________\n",
- "conv2d_2 (Conv2D) (None, 4, 4, 32) 9248 \n",
- "_________________________________________________________________\n",
- "max_pooling2d_2 (MaxPooling2 (None, 2, 2, 32) 0 \n",
- "_________________________________________________________________\n",
- "flatten (Flatten) (None, 128) 0 \n",
- "_________________________________________________________________\n",
- "dense (Dense) (None, 64) 8256 \n",
- "_________________________________________________________________\n",
- "dense_1 (Dense) (None, 10) 650 \n",
- "=================================================================\n",
- "Total params: 28,298\n",
- "Trainable params: 28,298\n",
- "Non-trainable params: 0\n",
- "_________________________________________________________________\n",
- "Epoch 1/100\n",
- "200/200 [==============================] - 2s 8ms/step - loss: 2.0185 - accuracy: 0.2515 - val_loss: 1.8635 - val_accuracy: 0.3168\n",
- "Epoch 2/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 1.6232 - accuracy: 0.4059 - val_loss: 1.4847 - val_accuracy: 0.4549\n",
- "Epoch 3/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 1.4421 - accuracy: 0.4752 - val_loss: 1.3781 - val_accuracy: 0.5041\n",
- "Epoch 4/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 1.3402 - accuracy: 0.5152 - val_loss: 1.2500 - val_accuracy: 0.5520\n",
- "Epoch 5/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 1.2316 - accuracy: 0.5614 - val_loss: 1.2739 - val_accuracy: 0.5524\n",
- "Epoch 6/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 1.1568 - accuracy: 0.5899 - val_loss: 1.2040 - val_accuracy: 0.5748\n",
- "Epoch 7/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 1.1007 - accuracy: 0.6094 - val_loss: 1.1218 - val_accuracy: 0.6042\n",
- "Epoch 8/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 1.0437 - accuracy: 0.6313 - val_loss: 1.0968 - val_accuracy: 0.6192\n",
- "Epoch 9/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.9965 - accuracy: 0.6489 - val_loss: 1.0501 - val_accuracy: 0.6338\n",
- "Epoch 10/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.9673 - accuracy: 0.6589 - val_loss: 1.0594 - val_accuracy: 0.6322\n",
- "Epoch 11/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.9388 - accuracy: 0.6711 - val_loss: 1.0302 - val_accuracy: 0.6445\n",
- "Epoch 12/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.9104 - accuracy: 0.6800 - val_loss: 0.9907 - val_accuracy: 0.6553\n",
- "Epoch 13/100\n",
- "200/200 [==============================] - 1s 6ms/step - loss: 0.8827 - accuracy: 0.6896 - val_loss: 0.9999 - val_accuracy: 0.6509\n",
- "Epoch 14/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.8453 - accuracy: 0.7023 - val_loss: 0.9708 - val_accuracy: 0.6674\n",
- "Epoch 15/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.8407 - accuracy: 0.7067 - val_loss: 0.9434 - val_accuracy: 0.6739\n",
- "Epoch 16/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.8152 - accuracy: 0.7136 - val_loss: 0.9440 - val_accuracy: 0.6786\n",
- "Epoch 17/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.7988 - accuracy: 0.7184 - val_loss: 0.9670 - val_accuracy: 0.6710\n",
- "Epoch 18/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.7763 - accuracy: 0.7270 - val_loss: 0.9224 - val_accuracy: 0.6854\n",
- "Epoch 19/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.7650 - accuracy: 0.7307 - val_loss: 0.9305 - val_accuracy: 0.6832\n",
- "Epoch 20/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.7508 - accuracy: 0.7354 - val_loss: 0.9674 - val_accuracy: 0.6707\n",
- "Epoch 21/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.7306 - accuracy: 0.7410 - val_loss: 0.9122 - val_accuracy: 0.6917\n",
- "Epoch 22/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.7142 - accuracy: 0.7498 - val_loss: 0.9287 - val_accuracy: 0.6868\n",
- "Epoch 23/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.7071 - accuracy: 0.7514 - val_loss: 0.9046 - val_accuracy: 0.6934\n",
- "Epoch 24/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.6923 - accuracy: 0.7564 - val_loss: 0.9136 - val_accuracy: 0.6908\n",
- "Epoch 25/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.6791 - accuracy: 0.7603 - val_loss: 0.9856 - val_accuracy: 0.6702\n",
- "Epoch 26/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.6711 - accuracy: 0.7637 - val_loss: 0.9372 - val_accuracy: 0.6865\n",
- "Epoch 27/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.6556 - accuracy: 0.7672 - val_loss: 0.9847 - val_accuracy: 0.6768\n",
- "Epoch 28/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.6497 - accuracy: 0.7714 - val_loss: 0.9554 - val_accuracy: 0.6881\n",
- "Epoch 29/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.6311 - accuracy: 0.7765 - val_loss: 0.9962 - val_accuracy: 0.6801\n",
- "Epoch 30/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.6291 - accuracy: 0.7773 - val_loss: 0.9268 - val_accuracy: 0.6926\n",
- "Epoch 31/100\n",
- "200/200 [==============================] - 1s 6ms/step - loss: 0.6175 - accuracy: 0.7802 - val_loss: 0.9507 - val_accuracy: 0.6904\n",
- "Epoch 32/100\n",
- "200/200 [==============================] - 1s 6ms/step - loss: 0.6107 - accuracy: 0.7830 - val_loss: 0.9776 - val_accuracy: 0.6799\n",
- "Epoch 33/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.6049 - accuracy: 0.7877 - val_loss: 0.9712 - val_accuracy: 0.6897\n",
- "Epoch 34/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5963 - accuracy: 0.7884 - val_loss: 0.9548 - val_accuracy: 0.6889\n",
- "Epoch 35/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5959 - accuracy: 0.7881 - val_loss: 0.9729 - val_accuracy: 0.6865\n",
- "Epoch 36/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5801 - accuracy: 0.7955 - val_loss: 0.9659 - val_accuracy: 0.6949\n",
- "Epoch 37/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5745 - accuracy: 0.7981 - val_loss: 0.9663 - val_accuracy: 0.6908\n",
- "Epoch 38/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5651 - accuracy: 0.7993 - val_loss: 0.9689 - val_accuracy: 0.6931\n",
- "Epoch 39/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5608 - accuracy: 0.8014 - val_loss: 0.9899 - val_accuracy: 0.6894\n",
- "Epoch 40/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5507 - accuracy: 0.8049 - val_loss: 0.9990 - val_accuracy: 0.6888\n",
- "Epoch 41/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5409 - accuracy: 0.8066 - val_loss: 0.9860 - val_accuracy: 0.6904\n",
- "Epoch 42/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5494 - accuracy: 0.8040 - val_loss: 0.9937 - val_accuracy: 0.6916\n",
- "Epoch 43/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5226 - accuracy: 0.8146 - val_loss: 0.9943 - val_accuracy: 0.6888\n",
- "Epoch 44/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5214 - accuracy: 0.8148 - val_loss: 1.0146 - val_accuracy: 0.6826\n",
- "Epoch 45/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5288 - accuracy: 0.8126 - val_loss: 1.0247 - val_accuracy: 0.6926\n",
- "Epoch 46/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5182 - accuracy: 0.8149 - val_loss: 1.0246 - val_accuracy: 0.6883\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Epoch 47/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5079 - accuracy: 0.8190 - val_loss: 1.0530 - val_accuracy: 0.6888\n",
- "Epoch 48/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5088 - accuracy: 0.8188 - val_loss: 1.0607 - val_accuracy: 0.6876\n",
- "Epoch 49/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4989 - accuracy: 0.8218 - val_loss: 1.0523 - val_accuracy: 0.6858\n",
- "Epoch 50/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.5042 - accuracy: 0.8200 - val_loss: 1.0645 - val_accuracy: 0.6898\n",
- "Epoch 51/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4800 - accuracy: 0.8292 - val_loss: 1.0762 - val_accuracy: 0.6812\n",
- "Epoch 52/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4853 - accuracy: 0.8262 - val_loss: 1.0960 - val_accuracy: 0.6828\n",
- "Epoch 53/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4754 - accuracy: 0.8308 - val_loss: 1.0551 - val_accuracy: 0.6916\n",
- "Epoch 54/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4745 - accuracy: 0.8284 - val_loss: 1.1048 - val_accuracy: 0.6768\n",
- "Epoch 55/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4770 - accuracy: 0.8309 - val_loss: 1.0978 - val_accuracy: 0.6893\n",
- "Epoch 56/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4708 - accuracy: 0.8311 - val_loss: 1.1025 - val_accuracy: 0.6791\n",
- "Epoch 57/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4577 - accuracy: 0.8366 - val_loss: 1.1247 - val_accuracy: 0.6792\n",
- "Epoch 58/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4693 - accuracy: 0.8321 - val_loss: 1.1224 - val_accuracy: 0.6808\n",
- "Epoch 59/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4533 - accuracy: 0.8385 - val_loss: 1.1161 - val_accuracy: 0.6830\n",
- "Epoch 60/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4602 - accuracy: 0.8326 - val_loss: 1.1262 - val_accuracy: 0.6781\n",
- "Epoch 61/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4528 - accuracy: 0.8379 - val_loss: 1.2267 - val_accuracy: 0.6654\n",
- "Epoch 62/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4533 - accuracy: 0.8354 - val_loss: 1.1433 - val_accuracy: 0.6901\n",
- "Epoch 63/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4373 - accuracy: 0.8418 - val_loss: 1.1481 - val_accuracy: 0.6857\n",
- "Epoch 64/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4442 - accuracy: 0.8391 - val_loss: 1.1446 - val_accuracy: 0.6854\n",
- "Epoch 65/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4247 - accuracy: 0.8480 - val_loss: 1.1511 - val_accuracy: 0.6856\n",
- "Epoch 66/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4395 - accuracy: 0.8406 - val_loss: 1.1960 - val_accuracy: 0.6791\n",
- "Epoch 67/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4402 - accuracy: 0.8394 - val_loss: 1.2087 - val_accuracy: 0.6852\n",
- "Epoch 68/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4247 - accuracy: 0.8464 - val_loss: 1.1801 - val_accuracy: 0.6837\n",
- "Epoch 69/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4219 - accuracy: 0.8460 - val_loss: 1.2674 - val_accuracy: 0.6683\n",
- "Epoch 70/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4184 - accuracy: 0.8494 - val_loss: 1.2206 - val_accuracy: 0.6828\n",
- "Epoch 71/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4116 - accuracy: 0.8505 - val_loss: 1.1856 - val_accuracy: 0.6782\n",
- "Epoch 72/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4177 - accuracy: 0.8481 - val_loss: 1.2790 - val_accuracy: 0.6791\n",
- "Epoch 73/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4135 - accuracy: 0.8505 - val_loss: 1.2457 - val_accuracy: 0.6806\n",
- "Epoch 74/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4046 - accuracy: 0.8528 - val_loss: 1.2291 - val_accuracy: 0.6852\n",
- "Epoch 75/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4132 - accuracy: 0.8500 - val_loss: 1.2248 - val_accuracy: 0.6866\n",
- "Epoch 76/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4116 - accuracy: 0.8501 - val_loss: 1.2619 - val_accuracy: 0.6793\n",
- "Epoch 77/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4146 - accuracy: 0.8500 - val_loss: 1.2497 - val_accuracy: 0.6780\n",
- "Epoch 78/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3922 - accuracy: 0.8579 - val_loss: 1.2788 - val_accuracy: 0.6718\n",
- "Epoch 79/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4084 - accuracy: 0.8499 - val_loss: 1.2568 - val_accuracy: 0.6876\n",
- "Epoch 80/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3976 - accuracy: 0.8559 - val_loss: 1.3637 - val_accuracy: 0.6652\n",
- "Epoch 81/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.4061 - accuracy: 0.8511 - val_loss: 1.2873 - val_accuracy: 0.6775\n",
- "Epoch 82/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3781 - accuracy: 0.8623 - val_loss: 1.3062 - val_accuracy: 0.6756\n",
- "Epoch 83/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3825 - accuracy: 0.8606 - val_loss: 1.2976 - val_accuracy: 0.6825\n",
- "Epoch 84/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3913 - accuracy: 0.8571 - val_loss: 1.4069 - val_accuracy: 0.6528\n",
- "Epoch 85/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3876 - accuracy: 0.8591 - val_loss: 1.3395 - val_accuracy: 0.6753\n",
- "Epoch 86/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3879 - accuracy: 0.8580 - val_loss: 1.3092 - val_accuracy: 0.6741\n",
- "Epoch 87/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3695 - accuracy: 0.8665 - val_loss: 1.3327 - val_accuracy: 0.6762\n",
- "Epoch 88/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3835 - accuracy: 0.8608 - val_loss: 1.3579 - val_accuracy: 0.6775\n",
- "Epoch 89/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3816 - accuracy: 0.8619 - val_loss: 1.3944 - val_accuracy: 0.6622\n",
- "Epoch 90/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3804 - accuracy: 0.8609 - val_loss: 1.3264 - val_accuracy: 0.6854\n",
- "Epoch 91/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3718 - accuracy: 0.8647 - val_loss: 1.3646 - val_accuracy: 0.6713\n",
- "Epoch 92/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3676 - accuracy: 0.8661 - val_loss: 1.3926 - val_accuracy: 0.6759\n",
- "Epoch 93/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3767 - accuracy: 0.8623 - val_loss: 1.3605 - val_accuracy: 0.6701\n",
- "Epoch 94/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3813 - accuracy: 0.8612 - val_loss: 1.3938 - val_accuracy: 0.6659\n",
- "Epoch 95/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3631 - accuracy: 0.8667 - val_loss: 1.4130 - val_accuracy: 0.6749\n",
- "Epoch 96/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3604 - accuracy: 0.8694 - val_loss: 1.3780 - val_accuracy: 0.6832\n",
- "Epoch 97/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3657 - accuracy: 0.8666 - val_loss: 1.4425 - val_accuracy: 0.6719\n",
- "Epoch 98/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3726 - accuracy: 0.8636 - val_loss: 1.4077 - val_accuracy: 0.6699\n",
- "Epoch 99/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3671 - accuracy: 0.8663 - val_loss: 1.4207 - val_accuracy: 0.6769\n",
- "Epoch 100/100\n",
- "200/200 [==============================] - 1s 5ms/step - loss: 0.3529 - accuracy: 0.8706 - val_loss: 1.4817 - val_accuracy: 0.6716\n",
- "Finished training.\n"
- ]
- }
- ],
- "source": [
- "#@markdown Train a simple model on CIFAR10 with Keras.\n",
- "\n",
- "dataset = 'cifar10'\n",
- "num_classes = 10\n",
- "num_conv = 3\n",
- "activation = 'relu'\n",
- "lr = 0.02\n",
- "momentum = 0.9\n",
- "batch_size = 250\n",
- "epochs = 100 # Privacy risks are especially visible with lots of epochs.\n",
- "\n",
- "\n",
- "def small_cnn(input_shape: Tuple[int, ...],\n",
- " num_classes: int,\n",
- " num_conv: int,\n",
- " activation: Text = 'relu') -> tf.keras.models.Sequential:\n",
- " \"\"\"Set up a small CNN for image classification.\n",
- "\n",
- " Args:\n",
- " input_shape: Integer tuple for the shape of the images.\n",
- " num_classes: Number of prediction classes.\n",
- " num_conv: Number of convolutional layers.\n",
- " activation: The activation function to use for conv and dense layers.\n",
- "\n",
- " Returns:\n",
- " The Keras model.\n",
- " \"\"\"\n",
- " model = tf.keras.models.Sequential()\n",
- " model.add(tf.keras.layers.Input(shape=input_shape))\n",
- "\n",
- " # Conv layers\n",
- " for _ in range(num_conv):\n",
- " model.add(tf.keras.layers.Conv2D(32, (3, 3), activation=activation))\n",
- " model.add(tf.keras.layers.MaxPooling2D())\n",
- "\n",
- " model.add(tf.keras.layers.Flatten())\n",
- " model.add(tf.keras.layers.Dense(64, activation=activation))\n",
- " model.add(tf.keras.layers.Dense(num_classes))\n",
- " return model\n",
- "\n",
- "\n",
- "print('Loading the dataset.')\n",
- "train_ds = tfds.as_numpy(\n",
- " tfds.load(dataset, split=tfds.Split.TRAIN, batch_size=-1))\n",
- "test_ds = tfds.as_numpy(\n",
- " tfds.load(dataset, split=tfds.Split.TEST, batch_size=-1))\n",
- "x_train = train_ds['image'].astype('float32') / 255.\n",
- "y_train_indices = train_ds['label'][:, np.newaxis]\n",
- "x_test = test_ds['image'].astype('float32') / 255.\n",
- "y_test_indices = test_ds['label'][:, np.newaxis]\n",
- "\n",
- "# Convert class vectors to binary class matrices.\n",
- "y_train = tf.keras.utils.to_categorical(y_train_indices, num_classes)\n",
- "y_test = tf.keras.utils.to_categorical(y_test_indices, num_classes)\n",
- "\n",
- "input_shape = x_train.shape[1:]\n",
- "\n",
- "model = small_cnn(\n",
- " input_shape, num_classes, num_conv=num_conv, activation=activation)\n",
- "\n",
- "print('learning rate %f' % lr)\n",
- "\n",
- "optimizer = tf.keras.optimizers.SGD(learning_rate=lr, momentum=momentum)\n",
- "\n",
- "loss = tf.keras.losses.CategoricalCrossentropy(from_logits=True)\n",
- "model.compile(loss=loss, optimizer=optimizer, metrics=['accuracy'])\n",
- "model.summary()\n",
- "model.fit(\n",
- " x_train,\n",
- " y_train,\n",
- " batch_size=batch_size,\n",
- " epochs=epochs,\n",
- " validation_data=(x_test, y_test),\n",
- " shuffle=True)\n",
- "print('Finished training.')"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "ee-zjGGGV1DC"
- },
- "source": [
- "## Calculate logits, probabilities and loss values for training and test sets.\n",
- "\n",
- "We will use these values later in the membership inference attack to separate training and test samples."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {
- "cellView": "both",
- "colab": {},
- "colab_type": "code",
- "id": "um9r0tSiPx4u"
- },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Predict on train...\n",
- "Predict on test...\n",
- "Apply softmax to get probabilities from logits...\n",
- "Compute losses...\n"
- ]
- }
- ],
- "source": [
- "print('Predict on train...')\n",
- "logits_train = model.predict(x_train, batch_size=batch_size)\n",
- "print('Predict on test...')\n",
- "logits_test = model.predict(x_test, batch_size=batch_size)\n",
- "\n",
- "print('Apply softmax to get probabilities from logits...')\n",
- "prob_train = special.softmax(logits_train, axis=1)\n",
- "prob_test = special.softmax(logits_test, axis=1)\n",
- "\n",
- "print('Compute losses...')\n",
- "cce = tf.keras.backend.categorical_crossentropy\n",
- "constant = tf.keras.backend.constant\n",
- "\n",
- "loss_train = cce(constant(y_train), constant(prob_train), from_logits=False).numpy()\n",
- "loss_test = cce(constant(y_test), constant(prob_test), from_logits=False).numpy()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "QETxVOHLiHP4"
- },
- "source": [
- "## Run membership inference attacks.\n",
- "\n",
- "We will now execute a membership inference attack against the previously trained CIFAR10 model. This will generate a number of scores, most notably the attacker advantage and the AUC of the membership inference classifier.\n",
- "\n",
- "An AUC close to 0.5 means that the attack wasn't able to distinguish training samples from test samples, which suggests that the model doesn't have privacy issues according to this test. Higher values, by contrast, indicate potential privacy issues."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {
- "colab": {},
- "colab_type": "code",
- "id": "B8NIwhVwQT7I"
- },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Best-performing attacks over all slices\n",
- " THRESHOLD_ATTACK achieved an AUC of 0.74 on slice CORRECTLY_CLASSIFIED=False\n",
- " THRESHOLD_ATTACK achieved an advantage of 0.37 on slice CORRECTLY_CLASSIFIED=False\n",
- "\n",
- "Best-performing attacks over slice: \"Entire dataset\"\n",
- " THRESHOLD_ENTROPY_ATTACK achieved an AUC of 0.60\n",
- " THRESHOLD_ATTACK achieved an advantage of 0.20\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=0\"\n",
- " LOGISTIC_REGRESSION achieved an AUC of 0.64\n",
- " LOGISTIC_REGRESSION achieved an advantage of 0.24\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=1\"\n",
- " LOGISTIC_REGRESSION achieved an AUC of 0.57\n",
- " THRESHOLD_ENTROPY_ATTACK achieved an advantage of 0.16\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=2\"\n",
- " THRESHOLD_ENTROPY_ATTACK achieved an AUC of 0.64\n",
- " THRESHOLD_ENTROPY_ATTACK achieved an advantage of 0.26\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=3\"\n",
- " LOGISTIC_REGRESSION achieved an AUC of 0.68\n",
- " LOGISTIC_REGRESSION achieved an advantage of 0.29\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=4\"\n",
- " THRESHOLD_ENTROPY_ATTACK achieved an AUC of 0.64\n",
- " THRESHOLD_ATTACK achieved an advantage of 0.24\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=5\"\n",
- " LOGISTIC_REGRESSION achieved an AUC of 0.63\n",
- " THRESHOLD_ENTROPY_ATTACK achieved an advantage of 0.23\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=6\"\n",
- " LOGISTIC_REGRESSION achieved an AUC of 0.63\n",
- " LOGISTIC_REGRESSION achieved an advantage of 0.21\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=7\"\n",
- " LOGISTIC_REGRESSION achieved an AUC of 0.60\n",
- " THRESHOLD_ENTROPY_ATTACK achieved an advantage of 0.21\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=8\"\n",
- " LOGISTIC_REGRESSION achieved an AUC of 0.60\n",
- " LOGISTIC_REGRESSION achieved an advantage of 0.19\n",
- "\n",
- "Best-performing attacks over slice: \"CLASS=9\"\n",
- " LOGISTIC_REGRESSION achieved an AUC of 0.62\n",
- " LOGISTIC_REGRESSION achieved an advantage of 0.20\n",
- "\n",
- "Best-performing attacks over slice: \"CORRECTLY_CLASSIFIED=True\"\n",
- " LOGISTIC_REGRESSION achieved an AUC of 0.50\n",
- " THRESHOLD_ATTACK achieved an advantage of 0.04\n",
- "\n",
- "Best-performing attacks over slice: \"CORRECTLY_CLASSIFIED=False\"\n",
- " THRESHOLD_ATTACK achieved an AUC of 0.74\n",
- " THRESHOLD_ATTACK achieved an advantage of 0.37\n"
- ]
- },
- {
- "data": {
- "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEGCAYAAABo25JHAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy86wFpkAAAACXBIWXMAAAsTAAALEwEAmpwYAAA0xklEQVR4nO3deXhN1/7H8feSiJjnOQgNNSQxpVQpRfWntNVWKbe3kxNjDS2X0mou2qpWJ2NRWqVXVF1jRWnNlEbIdMypKSElIsbIvH5/JHITGQQ5Z5+T8309T5/us/eS89kR55u19tprK601QgghHFcxowMIIYQwlhQCIYRwcFIIhBDCwUkhEEIIByeFQAghHJyz0QHuVZUqVbS7u7vRMYQQwq4cOHDgkta6am7H7K4QuLu7ExQUZHQMIYSwK0qpM3kdk6EhIYRwcFIIhBDCwUkhEEIIB2d31whyk5ycTFRUFAkJCUZHKfJcXV1xc3OjePHiRkcRQhSSIlEIoqKiKFu2LO7u7iiljI5TZGmtiY2NJSoqivr16xsdRwhRSCw2NKSU+k4pdVEpZc7juFJKzVRKRSilwpRSre73vRISEqhcubIUAQtTSlG5cmXpeQlRxFjyGsFioHs+x58GGmb8Nwj45kHeTIqAdcj3WYiix2JDQ1rrnUop93ya9AKW6PR1sPcppSoopWpqraMtlUkIIezNsb+vM+6/YVy5dp1eresz+qmHC/09jJw1VBuIzPI6KmNfDkqpQUqpIKVUUExMjFXC3Y81a9aglOLo0aOZ+7Zv384zzzyTrd0bb7zBypUrgfQL3ePHj6dhw4a0atWKdu3asXHjxnzfJzExkZdffhkPDw/atm3L6dOnc7Q5duwYLVq0yPyvXLlyfP311wB88MEHeHt706JFC5566inOnz8PQFxcHC+88ALe3t60adMGsznXUT0hhIVdvpmE31oz7uM38H9f7yQ08gpnrqayMijPe8IeiF1MH9VaL9Ba+2itfapWzfUOaZvg7+9Phw4d8Pf3L/Cf+eCDD4iOjsZsNnPw4EHWrFnD9evX8/0zixYtomLFikRERPDOO+/w7rvv5mjz8MMPExISQkhICAcOHKBUqVK88MILAIwdO5awsDBCQkJ45plnmDJlCgBTp06lRYsWhIWFsWTJEkaNGnUPZy+EuFdaa24kprDt2EU+WGPmmVm7cB+/gVYf/saSvdk/9NvVr8C819pYJIeRs4bOAXWyvHbL2GeXbty4we7du9m2bRvPPvsskydPvuufiY+P59tvv+XUqVOUKFECgOrVq9O3b998/9zatWuZNGkSAC+99BLDhw9Ha53n+P2WLVt46KGHqFevHgDlypXLPHbz5s3MP3f48GHGjx8PQOPGjTl9+jQXLlygevXqdz0XIUTBxCelsOrgOb7YfIy4+OR826rTf/L5G53p3b2zRTMZWQjWAcOVUsuBtsDVwrg+4D5+wwMHy83paT3zPb527Vq6d+9Oo0aNqFy5MgcOHKB169b5/pmIiAjq1q2b7YM5K19fX4YMGYKPj0+2/efOnaNOnfQa6uzsTPny5YmNjaVKlSq5fp3ly5fTv3//bPvef/99lixZQvny5dm2bRsAzZs3Z9WqVTz++OMEBgZy5swZoqKipBAI8QASU1LZfeIS60PPsybkfJ7tnIopnmpaHafYk6ycPYXXX+zBpO8nUbJkSYtntFghUEr5A08AVZRSUcC/geIAWut5QADQA4gA4oE3LZXFGvz9/TOHUvr164e/vz+tW7fO87f0gsy+Wbhw4QPnSkpKYt26dXzyySfZ9n/88cd8/PHHfPLJJ8yePZvJkyczfvx4Ro0aRYsWLfDy8qJly5Y4OTk9cAYhHIXWmvBzVzlwJo7Iy7f48c8zJKWk5dr2sYcq84h7JUZ08cDZqRiRkZEMGTKEyMhIfvH/PscvgJZkyVlD/e9yXANvFfb73u03d0u4fPkyW7duJTw8HKUUqampKKWYPn06lStXJi4uLkf7KlWq4OHhwdmzZ7l27VqevYLc1K5dm8jISNzc3EhJSeHq1atUrlw517YbN26kVatWef5W/8orr9CjRw8mT55MuXLl+P7774H0H+j69evToEGDAucSwtGkpmmir95i94lL+K09RFJq7h/6AGVdnenVohY9PGvymMf/eu9paWl88803+Pn5MWrUKFavXo2Li4s14mcqEncWG23lypW8+uqrzJ8/P3Nfp06d2LVrF23btuX8+fMcOXKEJk2acObMGUJDQ2nRogWlSpXCZDIxatQo5s+fj4uLCzExMWzfvp0+ffrk+X7PPfccP/zwA+3atWPlypV06dIlzx6Gv79/jmGhEydO0LBhQyB9SKtx48YAXLlyhVKlSuHi4sLChQvp2LHjPRUoIRyB1pqZWyL46vfj+bZrXqcCHlXL8HCNMgxoXx9np5xzc44fP46vry/Jycns2LGDpk2bWip2vqQQFAJ/f/8cM3d69+6Nv78/HTt25Mcff+TNN98kISGB4sWLs3DhQsqXLw/ARx99xMSJE2natCmurq6ULl06cxZPXtcITCYTr776Kh4eHlSqVInly5cDcP78eXx9fQkICADSLwT/9ttv2QoUwPjx4zl27BjFihWjXr16zJs3D4AjR47w+uuvo5SiWbNmLFq0qPC/WULYIa01R6Kv8/nmY2w9ejHHcadiimplS9DDqybjuj9MCef8h1RTUlL48ssv+eyzz/jggw8YPny4ocOwKn2Exn74+PjoOx9Mc/u3bWEd8v0WRV1SShpbj15k5pYTHI6+lme7Kb2a0b9NXYrn8tt+XkJDQxkwYACVKlViwYIFVlu3Syl1QGud64UH6REIIUSG6Ku3eOXbPzl56Waebdp7VGbOP1pRodS9jeMnJiby0UcfMX/+fKZNm8abb75pM0u2SCEQQjikawnJHDwTR8TFG+w6cYkdx3OuWuBWsSQ9vWsyrJMH5Uvd/9Lre/fuxWQyZd7oWatWrQeJXuiKTCHI74YqUXjsbShRiNQ0TWjUFb7fc5rAU7FcuJZIKRcn4pNS8/wz73ZvzJBODR74M+XGjRtMnDiRFStWMHPmTHr37m2Tn1NFohC4uroSGxsrS1Fb2O3nEbi6uhodRYg87f0rlh/3nWFDeDRVy5Yg5npijjbxSakoBdXKpt/R/4x3LWqUc+XZ5rWoUb5wfr5/++03Bg0aRMeOHQkPD89zirctKBKFwM3NjaioKGx5Qbqi4vYTyoSwFSmpaawJOc+KoEgCT13OdixrEXAtXoyHa5RjeGcPPGuXo3LpErg4F/5ya3FxcYwZM4YtW7Ywf/58unfPbzV+21AkCkHx4sXliVlCOIiklDT+PBXLfw9EsfPEJS7fTMq1XeeHq/JaO3ea16lAxVLFrTJasHr1aoYPH84LL7yA2WymbNmyFn/PwlAkCoEQomhLTEkl5OwVXl6wL882bhVL0r9NXfr61KFqxpCPtfz999+MGDGCsLAwli9fzuOPP27V939QUgiEEDYlNU1zJT6JgPBoNh26wO6IS7m2a1arHPF
JqUx70Yu2DYwZf9das3TpUsaOHcuAAQNYunSpXV5Dk0IghDBUappmQ3g05nNX+f3whXzn8Ls4F2PuP1rxZFPjV8Q9c+YMgwcP5sKFC5lretkrKQRCCKuJT0ohJPIKa4PP81NQZL5ty7o6cz0hhVFdG/J/zWrQtJZtrHuVlpbG3LlzmTRpEmPGjOFf//oXxYvf/z0GtkAKgRDCYv6+msB//jzD/J0n81yOGUAp0Bom9myCa3EnenjVpFJp667AWRDHjh3DZDKhtWb37t2ZCzbaOykEQohCk5SSxrwdf3H072sEhP+dZzuv2uVpVqscvo834KGqpW3+/p/k5GQ+//xzvvjiCyZNmsSwYcMoVswunvRbIFIIhBD3RWvN9mMxzN0eQXxSKofO5704m4tTMd7v2YTnW9R+oKUajBAcHIzJZKJq1aoEBQXh7u5udKRCJ4VACHFPbiSmMG5laL6/8QP0buXGgA7uNKtV3krJCldCQgJTpkxh4cKFTJ8+nddee83mey73SwqBEOKuzsTeZOzPYUTGxRN9NSHH8Z5eNXmrswe1K5aknKuz3X9g7t69G19fX7y8vAgLC6NGjRpGR7IoKQRCiFylpmlG+gezITw61+Nt61diwas+djfUk5/r168zYcIEVq1axezZs3nxxReNjmQVUgiEENkcv3CdDWHRzNhyIsexxx6qzHs9mtC4RtlcH71ozzZt2sTgwYPp0qULhw4domLFikZHshopBEIILl5PYPqvx/j5QFSOY8WdFAtff4ROjaoakMzyLl++zDvvvMOOHTtYsGABTz31lNGRrE4KgRAOJjk1jQNn4th5PIYzl+PZeuQit5Jzrs3fwaMK459ujGdt+7zYWxArV65k5MiR9OnTB7PZTJkyZYyOZAgpBEIUcVprthy5yJajF/EPPJtvW596FRnexYMnHq5mpXTGiI6O5q233uLIkSP8/PPPtG/f3uhIhpJCIEQRFJ+Uwor9kUxafzjfdo83rELjGmXp0rg67R6y3QenFBatNYsXL+bdd99l0KBBLFu2zC4XiStsUgiEKCJSUtM4fuEGPWbuyvV4c7fyDOhQn4bVytKkZlm7n+J5r06dOsWgQYO4fPkymzdvpkWLFkZHshlSCISwU2lpmr9ibmA+f5X1odFsPXoxR5uHq5elj48bbzzmXuRm+RRUamoqc+bMYcqUKYwdO5YxY8bg7CwffVnJd0MIO3IlPgnzuWsM9z/IlfjkXNuUcnGiY8OqzOjfghLOTlZOaFuOHDmCyWTC2dmZP/74g0aNGhkdySZJIRDChsVcT2SE/0Hik1IJi7qaZztvt/I8612LAR3q41TMsYZ8cpOcnMxnn33GV199xZQpUxgyZEiRWiSusEkhEMJGXIlPYvuxGL787ThnL8fn2/bRBpVISdX8NLidfPDf4cCBAwwYMIBatWpx8OBB6tata3QkmyeFQAgDnYy5wcQ1Zi7dSOT4hRu5tqlSxoXOD1fjH23r0rRWOYcf7snLrVu3mDRpEosXL+aLL77glVdecbgL4vdLCoEQVnT072uM9A8m4uIN0nTubUo4F6Nl3QqM7vYwzWqVo3QJ+Wd6Nzt37sTX15eWLVsSHh5OtWpF+z6IwmbRnzClVHdgBuAELNRaT7vjeF3gB6BCRpvxWusAS2YSwtpS0zQB4dHM3/kX5nO5r9nfqVFVJj/XDPcqpa2czr5du3aN8ePHs27dOmbPns3zzz9vdCS7ZLFCoJRyAuYA3YAoYL9Sap3WOusdLhOBFVrrb5RSTYEAwN1SmYSwpribSfScuYvzuSzb/GSTaozu9jB1KpWkrGvRWb3TmgICAhgyZAhPPfUUZrOZChUqGB3JblmyR9AGiNBanwRQSi0HegFZC4EGbj+Rujxw3oJ5hLCK8Kir+C7Zz4VriTmOvd+jCQM7NjAgVdFx6dIl3nnnHfbs2cP3339P165djY5k9yxZCGoDkVleRwFt72gzCdislBoBlAaezO0LKaUGAYMAmQEgbNK60POM9A/O9dj0l7x5qbWbXLh8QFprVqxYwdtvv03//v0JDw+ndGkZSisMRl+F6g8s1lp/oZRqByxVSnlqrdOyNtJaLwAWAPj4+ORxiU0I69Fas+N4DJ/9eozD0TnH/SuWKk6/NnUZ3a0RxR30jt7CdP78eYYOHUpERASrV6/m0UcfNTpSkWLJQnAOqJPltVvGvqxMQHcArfVepZQrUAXIea+8EDbgSPQ15m7/i/WhOUcxG1Yrw8cveNGybgX58C8kWmsWLVrEhAkTGDZsGCtWrKBEiRJGxypyLFkI9gMNlVL1SS8A/YB/3NHmLNAVWKyUagK4AjEWzCTEPUtL06wNPcc7P4XmevzFlrX56AVPSrkY3cEuWk6ePMnAgQO5du0aW7Zswdvb2+hIRZbFfnK11ilKqeHAJtKnhn6ntT6klJoCBGmt1wFjgG+VUu+QfuH4Da21DP0Im6C1ZqP5b4b952COYy+0rM2ILh40qOqYDzKxpNTUVGbOnMnHH3/M+PHjefvtt2WROAuz6Hc3456AgDv2+WXZPgw49hMhhE3RWvNzUBQ//nkm17V9Pu/TnJdauxmQzDEcOnQIk8mEq6sr+/btw8PDw+hIDkHKrBDAnydjmbjGzImLuS/zsHJIO3zcK1k5leNISkpi2rRpzJo1i48++oiBAwfKInFWJIVAOKy0NM22YxeZsCqci9ezz/kvpmDRG4/Q/qEquDjLB5Il7d+/nwEDBlCvXj2Cg4Nxc5Mel7VJIRAO53pCMr1m7+HkpZs5jn3Yqxn/fLSezPm3gvj4ePz8/Pjxxx/56quv6Nevn3zfDSKFQDiUNcHnePunkBz7J/ZswoD29SkmSzpbxfbt2/H19aVNmzaEh4dTtWpVoyM5NCkEosiLu5nEL2Hn+WDtoWz7B3dswPinG8tvoVZ09epVxo0bR0BAAHPnzuXZZ581OpJACoEowq7eSuafC/8k/FzO2T8hft2oUMrFgFSO65dffmHo0KH06NEDs9lM+fLljY4kMkghEEXO31cTePSTLTn2j+ziQdcm1Wlep4L1QzmwmJgYRo0aRWBgIEuWLKFz585GRxJ3kEIgiozUNM3ENeH4B0Zm2/9og0osfrMNrsXlyV7WpLXG39+f0aNH8+qrrxIWFkapUqWMjiVyIYVA2L3Iy/F0/WIHSanZ1irE75mmvNneXa4BGCAqKoqhQ4dy+vRp1q1bR5s2bYyOJPIhhUDYresJyTwxfTuxN5Oy7W9dryI/DGhDGXnEo9WlpaXx7bffMnHiREaMGMF///tfXFzkWoytk38pwu5EXLzOB2sOsfdkbLb9bzzmzoQejeXh7gaJiIhg4MCBxMfHs23bNjw9PY2OJApICoGwG+FRV3l29u4c+3u3cuPzPt4yBGSQlJQUvv76a6ZNm8b777/PyJEjcXKSYmxPpBAIu7Bk72n87rgPoEvjaszq35LSMgRkmPDwcEwmE2XLluXPP//koYceMjqSuA/yL0jYtMSUVFpO+Y34pNTMfV+93JwXWsp6NEZKTExk6tSpzJ07l08++QSTyS
Q9MjsmhUDYpJUHovjXzzkfBLNx1OM0qVnOgETitn379mEymfDw8CAkJITatWsbHUk8ICkEwqYcOn+VnjNzXgfw7VCfic80NSCRuO3mzZt88MEH+Pv78/XXX9O3b1/pBRQRUgiEzfgl7DzDlwVn2zemWyOGdfbASRaDM9SWLVsYOHAg7du3Jzw8nCpVqhgdSRQiKQTCcOeu3KL9tK3Z9vkPfJR2D1U2KJG47cqVK4wdO5ZNmzbxzTff0LNnT6MjCQuQQiAMcfFaAjO3nmBt8HmuJ6ZkO7Z1TCd5FrANWLt2LW+99RbPPfccZrOZcuXk2kxRJYVAWE1ammbGlhPsOhHDwbNXchz/tLcXLz9S1/rBRDYXLlxg5MiRBAcHs2zZMjp27Gh0JGFhUgiExaWlaab8cpjFf5zOcax1vYr0ae3GS63dcHaSR0IaSWvNf/7zH8aMGcMbb7zB4sWLKVmypNGxhBVIIRAWtTbkHKOWh+TY/+HznvR/pI58+NuIs2fPMmTIEM6dO8eGDRvw8fExOpKwIikEwiKCTl/mpXl7s+1rWrMcM/q1oGH1sgalEndKS0tj/vz5+Pn5MWrUKN59912KFy9udCxhZVIIRKHacuQCph+Ccuz/872uVC/nakAikZfjx4/j6+tLcnIyO3bsoGlTuU/DUUkhEA8kLU2z43gMM7eeIPiOC8BOxRQLX/Ohc+NqxoQTuUpJSeGLL75g+vTp+Pn58dZbb8kicQ5OCoG4b1Fx8XT4dFuO/WVLOLNq2GMyBGSDQkNDGTBgAJUqVWL//v3Ur1/f6EjCBkghEPfkVlIqAxbvz/EsgHqVS9HTqyYvP1KHepVLG5RO5CUhIYGPPvqIBQsW8Omnn/LGG2/I8hAikxQCUSBap08B/X7P6RzH/J5pyoAO8pulrfrjjz8wmUw0adKE0NBQatasaXQkYWOkEIi7irh4nW5f7UTr/+2rU6kkW8c8QXGZ/mmzbty4wfvvv8/PP//MzJkz6d27t/QCRK6kEIg83UhMYcSyg2w7FpO5r4dXDb5+uSUuzlIAbNnmzZsZPHgwHTt2JDw8nMqVZd0mkTeLFgKlVHdgBuAELNRaT8ulTV9gEqCBUK31PyyZSdyd1pqeM3dzOPpatv2z+rfk2ea1DEolCiIuLo7Ro0ezdetW5s+fT/fu3Y2OJOyAxQqBUsoJmAN0A6KA/UqpdVrrw1naNAQmAO211nFKKZlnaLAJq8LxDzybbV+b+pVYMqANrsVliqEtW7VqFSNGjODFF1/EbDZTtqzM2hIFY8keQRsgQmt9EkAptRzoBRzO0mYgMEdrHQegtb5owTwiDxevJTBhVThbjmb/9jepWY6Nox43KJUoqL///pvhw4djNpv56aef6NChg9GRhJ2x5EBvbSAyy+uojH1ZNQIaKaX2KKX2ZQwl5aCUGqSUClJKBcXExOTWRNynn/afpc3ULTmKQNDEJ6UI2DitNT/88APe3t40atSIkJAQKQLivhh9sdgZaAg8AbgBO5VSXlrrK1kbaa0XAAsAfHx8NOKB3UxMofPn27l4PREA1+LFGPR4A/75aD2qyVIQNu/MmTMMHjyYCxcu8Ouvv9KqVSujIwk7ZskewTmgTpbXbhn7sooC1mmtk7XWp4DjpBcGYSHJqWmsCT5Hs39vyiwCAAc/6Mbopx6WImDj0tLSmD17Nq1bt6ZTp04EBgZKERAPzJI9gv1AQ6VUfdILQD/gzhlBa4D+wPdKqSqkDxWdtGAmh7YiKJJxK8Oy7Xur80OM/b/GBiUS9+Lo0aP4+voCsHv3bho3lr83UTgs1iPQWqcAw4FNwBFghdb6kFJqilLquYxmm4BYpdRhYBswVmsdm/tXFPdLa43vD/uzFYFOjaryy4gOUgTsQHJyMlOnTqVDhw7069ePnTt3ShEQhcqi1wi01gFAwB37/LJsa2B0xn/CAuZuj+CzX49l27dueHu83SoYE0jck+DgYAYMGED16tU5cOAA9erVMzqSKIKMvlgsLKjXnD2ERl7JfP14wyr88GYbihWTZQZsXUJCApMnT2bRokVMnz6d1157TZaHEBYjhaAImrnlBF/+djzbvvXDO+DlVt6gROJe7N69G5PJhLe3N2FhYdSoUcPoSKKIk0JQhKw8EMW/fg7Ntq9/mzp88qK3QYnEvbh+/ToTJkxg9erVzJo1ixdffNHoSMJBSCEoIu4sAmVLOLP6rcfwqCbLDNiDX3/9lcGDB9O1a1fMZjMVK1Y0OpJwIFIIioDhyw7yS1h05utt/3qC+lXk4TD2IDY2ltGjR7Nz504WLlxIt27djI4kHJCsJWznPtl4JFsR2DO+ixQBO6C1ZuXKlXh5eVGhQgXCw8OlCAjD3HOPQClVDOivtf6PBfKIAkpOTeP5OXs4dD59qegyJZwJ8euGszwoxuZFR0fz1ltvceTIEVauXMljjz1mdCTh4PL81FBKlVNKTVBKzVZKPaXSjSD9zt++1oso7rTzeAwN39+YWQRqVyhJ2L+fkiJg47TWfP/99zRv3pymTZsSHBwsRUDYhPx6BEuBOGAv4Au8Byjgea11iOWjiTvdTEzB9MN+9p28nLlvXPeHGdrpIZljbuNOnTrFoEGDuHz5Mps3b6ZFixZGRxIiU36FoIHW2gtAKbUQiAbqaq0TrJJMZPPfA1GMuWNq6Jx/tKKntzyI3JalpqYye/ZsPvzwQ8aNG8fo0aNxdpY5GsK25PcTmXx7Q2udqpSKkiJgfX+ejOXlBfuy7evSuBrzX20tD463cYcPH8bX1xdnZ2f++OMPGjVqZHQkIXKVXyForpS6RvpwEEDJLK+11rqcxdM5sMSUVHp/8wfmc9mfGxz4XldZKtrGJScn8+mnnzJjxgymTJnC4MGDKVZMirawXXkWAq21PKDWIFfik2gx5bds+/47tB2t61UyKJEoqAMHDjBgwABq167NgQMHqFu3rtGRhLirPAuBUsoVGAJ4AGHAdxlLSwsLWhtyjlHLQzJfd21cjXkyDGTzbt26xaRJk1i8eDFffPEFr7zyilzAF3Yjv6GhH0i/TrAL6AE0A0ZZI5Qjik9Koanfpmz7HnGvyKI3HjEokSioHTt2MHDgQFq1akV4eDjVqlUzOpIQ9yS/QtA0y6yhRUCgdSI5nr9ibtD1ix3Z9q0a9hit6sp6M7bs2rVrvPvuu6xfv545c+bQq1cvoyMJcV/yG2/IOmtIhoQs5JOAI9mKQA+vGpye1lOKgI0LCAjA09OTlJQUzGazFAFh1/LrEbTImCUE6TOFZNZQIRu3MpQVQVGZr5f5tuUxjyoGJhJ3c+nSJd5++2327t3L999/T9euXY2OJMQDy68QhGqtW1otiQO5mZhC9xk7ibx8K3PfiY+flgvCNkxrzYoVK3j77bfp378/YWFhlC4ti/uJoiG/QqCtlsKBHDwbx4tz/8i279QnPWSGiQ07f/48Q4cOJSIigtWrV/Poo48aHUmIQpVfIaimlMrzofJa6y8tkKdIu7MIvOxTh6kvekkRsFFaaxYtWsR7773H0KFDWbFiBSVKlDA6lhCFLr9C4ASU4
X93FosHlLUI/PleV6rLHcI266+//mLgwIFcv36dLVu24OXlZXQkISwmv0IQrbWeYrUkRdyM309kbv/Ht60UARuVmprKjBkzmDp1KhMmTGDUqFGySJwo8vL7CZeeQCF5fs4eQiKvZL5uLzODbJLZbMZkMlGyZEn27duHh4eH0ZGEsIr8pqnIvLhCsDbkXGYRKO3ixPGPnjY2kMghKSmJyZMn07lzZ0wmE1u3bpUiIBxKfovOXc7rmLg7rTUT15j5z59nM/cdmtLdwEQiN4GBgZhMJurVq0dwcDBubm5GRxLC6mTw00LqTwjI9nrXuM4GJRG5iY+Px8/Pjx9//JGvvvqKfv36yewt4bDkDiYLmLTuUOZ2nUolCXyvK3UqlTIwkchq27ZteHt7Ex0dTXh4OP3795ciIBya9AgK2YRV4fgH/m84aNe4LgamEVldvXqVcePGERAQwNy5c3n22WeNjiSETZAeQSHqO29vtiJwaPL/GZhGZLV+/Xo8PT1RSmE2m6UICJGF9AgKSc+Zuzh0Pn2NviY1y7Fx1OMGJxIAMTExjBo1isDAQJYsWULnznKtRog7WbRHoJTqrpQ6ppSKUEqNz6ddb6WUVkr5WDKPpQSfjcssAoAUARugtWbZsmV4eXlRu3ZtwsLCpAgIkQeL9QiUUk7AHKAbEAXsV0qt01ofvqNdWdKffPanpbJYktaaF7IsHXFyag8D0wiAyMhIhg4dytmzZ1m/fj2PPCJPeRMiP5bsEbQBIrTWJ7XWScByILend3wIfAokWDCLxXT+fHvm9g8D2lCsmMw+MUpaWhrz58+nVatWtGnThqCgICkCQhSAJa8R1AYis7yOAtpmbaCUagXU0VpvUEqNzesLKaUGAYMA6tata4Go9+fDXw5zOjYegD6t3ejUqKrBiRzXiRMnGDhwIAkJCWzfvp1mzZoZHUkIu2HYrCGlVDHgS2DM3dpqrRdorX201j5Vq9rGh+2V+CQW7T6V+Xp6n+YGpnFcKSkpfP7557Rr145evXqxZ88eKQJC3CNL9gjOAXWyvHbL2HdbWcAT2J5xM08NYJ1S6jmtdZAFcxWKrM8ZPjxFpokaISwsDJPJRLly5QgMDKRBgwZGRxLCLlmyR7AfaKiUqq+UcgH6AetuH9RaX9VaV9Fau2ut3YF9gF0Uge92nyL2ZhIAn/b2opSLzMK1psTERPz8/HjyyScZMmQIv//+uxQBIR6AxT7BtNYpSqnhwCbSH3Lzndb6kFJqChCktV6X/1ewTWdibzLll/9NfOrrUyef1qKw7du3D5PJRMOGDQkJCaFWrVpGRxLC7ln0V1mtdQAQcMc+vzzaPmHJLIXlmVm7M7d3jH1C1qixkps3bzJx4kSWL1/OjBkz6NOnj3zvhSgkssTEPUhN01xPSAGgr48b9SqXNjiRY7j9qMhLly5hNpvp27evFAEhCpEMbt+DF+buydye+oI8w9bSrly5wr/+9S82b97MvHnz6NFDbtYTwhKkR1BAQacvExZ1FYCXferg7CTfOktau3Ytnp6euLi4YDabpQgIYUHSIygArTUvzdub+frTl7wNTFO0XbhwgZEjRxIcHMyyZcvo2LGj0ZGEKPLk19oCePyzbZnb371hl+vi2TytNUuXLsXb25v69esTGhoqRUAIK5EewV18sfkYUXG3AHCrWJIujasbnKjoOXv2LEOGDOH8+fMEBATQunVroyMJ4VCkR5CPLUcuMGtrBAAVShVn97vytLHClJaWxty5c2ndujXt27dn//79UgSEMID0CPJh+uF/NzkfmNjNwCRFz/Hjx/H19SUlJYWdO3fSpEkToyMJ4bCkR5CHaRuPZm5vHdMJJ1leulCkpKTw6aef8thjj/HSSy+xa9cuKQJCGEx6BLm4Gp/MvB1/AVC3UikaVC1jcKKiISQkBJPJROXKldm/fz/169c3OpIQAukR5GrUT8GZ26uHPWZgkqIhISGB999/n6eeeooRI0awadMmKQJC2BDpEeRi+7EYAHzqVaRymRIGp7Fvf/zxByaTiSZNmhAaGkrNmjWNjiSEuIMUgjtkvTYw+x+tDExi327cuMF7773HypUrmTVrFr179zY6khAiDzI0lEVCcmrmtQEX52LUKO9qcCL7tHnzZry8vLh27Rpms1mKgBA2TnoEWYz5OTRzO8RPpoveq7i4OEaPHs22bduYP38+//d/8uQ2IeyB9Aiy2BAWDUAb90ry1LF7tGrVKjw9PSlTpgzh4eFSBISwI/Jpl2HXiZjM7dmvtDQwiX35+++/GT58OGazmZ9++okOHToYHUkIcY+kR5Dhy9+OA9CoehmqlZVrA3ejtWbx4sV4e3vTqFEjQkJCpAgIYaekRwAs3XeG4LNXAPj3s82MDWMHTp8+zeDBg7l48SKbNm2iZUvpQQlhz6RHAHywxpy53d6jioFJbFtaWhqzZs3Cx8eHJ554gsDAQCkCQhQBDt8j8A88m7m9a1xnA5PYtqNHj+Lr6wvA7t27ady4scGJhBCFxeF7BLdnCpV1daZOpVIGp7E9ycnJTJ06lQ4dOtC/f3927twpRUCIIsbhewSnLt0EYEinhwxOYnsOHjyIyWSievXqHDhwgHr16hkdSQhhAQ7dI4i8HM+5K+lPH3u0QWWD09iOW7duMWHCBJ5++mneeecdNm7cKEVAiCLMoXsES/edydxuVbeCcUFsyO7duzGZTHh7exMWFkb16vJoTiGKOocuBD8HRQLwert6KOXYD565fv06EyZMYPXq1cyaNYsXX3zR6EhCCCtx2KGhhORU4uKTAejaxLF/6/3111/x9PQkPj4es9ksRUAIB+OwPYLv9pzK3O7YqKqBSYwTGxvL6NGj2blzJwsXLqRbN1loTwhH5LA9gsPnrwFQvZzjPXhGa83PP/+Mp6cnFStWJDw8XIqAEA7MYXsEkZfjAejrU8fgJNYVHR3NsGHDOHbsGKtWraJdu3ZGRxJCGMyiPQKlVHel1DGlVIRSanwux0crpQ4rpcKUUluUUlaboxgadRWAxjXKWestDaW15rvvvqN58+Z4enoSHBwsRUAIAViwR6CUcgLmAN2AKGC/Umqd1vpwlmbBgI/WOl4pNRT4DHjZUpluO51xExlA2waVLP12hjt16hSDBg0iLi6O3377jebNmxsdSQhhQyzZI2gDRGitT2qtk4DlQK+sDbTW27TW8Rkv9wFuFsyT6bXvAgGoXNqFKkX44fSpqanMmDGDRx55hG7durFv3z4pAkKIHCx5jaA2EJnldRTQNp/2JmBjbgeUUoOAQQB169Z9oFCpaZqzGdcH/vlo0b1b9vDhw5hMJlxcXPjjjz9o1KiR0ZGEEDbKJmYNKaX+CfgA03M7rrVeoLX20Vr7VK36YFM9952Mzdx++8mGD/S1bFFSUhIffvghnTp14vXXX2fbtm1SBIQQ+bJkj+AckHVKjlvGvmyUUk8C7wOdtNaJFswDwKHzVzPelyJ3N3FQUBAmk4natWtz8OBB6tRxrBlRQoj7Y8kewX6goVKqvlLKBegHrMvaQCnVEpgPPKe1vmjBLJmi4tIX
mXvas4Y13s4qbt26xbhx4+jZsyfjxo1jw4YNUgSEEAVmsUKgtU4BhgObgCPACq31IaXUFKXUcxnNpgNlgJ+VUiFKqXV5fLlCszviEgAPVS1j6beyih07duDt7c3Zs2cJDw/nlVdeKXI9HSGEZVn0hjKtdQAQcMc+vyzbT1ry/XOTmqYBqFWhpLXfulBdu3aNd999l/Xr1zNnzhx69ep19z8khBC5sImLxdaiteZMbPqMoZZ2vOz0hg0b8PT0JDU1FbPZLEVACPFAHGqJiR3HYzK33SuXNjDJ/bl06RJvv/02e/fuZfHixXTp0sXoSEKIIsChegS3n0YG4FrcycAk90ZrzfLly/H09KR69eqEhYVJERBCFBqH6hGcuHADsK/nE587d45hw4YRERHB2rVrads2v3vyhBDi3jlUj2DL0QsAPFTV9oeFtNZ8++23tGjRgpYtW3Lw4EEpAkIIi3CoHkF8YioAtW18xtBff/3FwIEDuXHjBlu3bsXLy8voSEKIIsyhegSxN5MAqFOplMFJcpeamsqXX35J27Zt6dmzJ3v37pUiIISwOIfpEWitM7erl3M1MEnuzGYzJpOJUqVKsW/fPjw8PIyOJIRwEA7TI7h2KyVz28XZdk47KSmJyZMn07lzZ0wmE1u2bJEiIISwKofpESSnpQFQ0oamjQYGBmIymXB3dyc4OBg3N6s8jkEIIbJxmEJwe2mJMq7Gn3J8fDx+fn78+OOPfPXVV/Tr10/WBxJCGMZ2xkgsLCWjEDgXM/YDd9u2bXh7exMdHU14eDj9+/eXIiCEMJTxvx5bSVpGIXAyqBBcvXqVsWPHsnHjRr755hueeeYZQ3IIIcSdHK5HYEQhWL9+PZ6enhQrVgyz2SxFQAhhUxymR3AjIX3WkJMVh2FiYmIYOXIk+/fvZ+nSpTzxxBNWe28hhCgoh+kRJKWmzxo6eemmxd9La82yZcvw8vLCzc2NsLAwKQJCCJvlMD2CpJT0QtCmfiWLvk9kZCRDhw7l7NmzrF+/nkceecSi7yeEEA/KYXoENxLTh4ZKWOhmsrS0NObNm0erVq1o27YtQUFBUgSEEHbBYXoEsTcSAbiWkHKXlvfuxIkTDBw4kISEBLZv306zZs0K/T2EEMJSHKZHcHu2ULlCvKEsJSWF6dOn065dO55//nn27NkjRUAIYXccpkdwOWPlUbeKhbPyaFhYGCaTifLlyxMYGEiDBg0K5esKIYS1OUyP4FLG0FBCcuoDfZ3ExET8/Pzo2rUrQ4YM4bfffpMiIISwaw7TIyjl4pzx//tfdG7fvn2YTCYaNmxIaGgotWrVKqx4QghhGIcpBCkZq4/WuI9nEdy8eZOJEyeyfPlyZsyYQZ8+fWR9ICFEkeEwQ0MpqRmLzjnd2yn//vvveHl5ERsbi9lspm/fvlIEhBBFisP0CI5duA5AcaeCfYhfuXKFMWPG8PvvvzNv3jyefvppS8YTQgjDOEyPoELJ4gDExSfdte2aNWto1qwZrq6uhIeHSxEQQhRpDtMjuD2c06BKmTzbXLhwgREjRhASEoK/vz8dO3a0VjwhhDCMw/QIUvNZhlprzdKlS/H29qZBgwaEhoZKERBCOAyH6RGk6vRCUOyOQnD27FkGDx5MdHQ0AQEBtG7d2oh4QghhGIfpEWQ+oSxjiCgtLY05c+bQqlUrHn/8cfbv3y9FQAjhkCzaI1BKdQdmAE7AQq31tDuOlwCWAK2BWOBlrfVpS2T539AQHDt2DF9fX1JTU9m1axdNmjSxxFsKIYRdsFiPQCnlBMwBngaaAv2VUk3vaGYC4rTWHsBXwKeWypOWMTS0evUq2rdvT9++faUICCEElu0RtAEitNYnAZRSy4FewOEsbXoBkzK2VwKzlVJK64xP7UJ0/WY8AOGhYQQFBeHu7l7YbyGEEHbJktcIagORWV5HZezLtY3WOgW4ClS+8wsppQYppYKUUkExMTH3FaZSmZKUckrj008+kiIghBBZ2MWsIa31AmABgI+Pz331Fr55rU2hZhJCiKLCkj2Cc0CdLK/dMvbl2kYp5QyUJ/2isRBCCCuxZCHYDzRUStVXSrkA/YB1d7RZB7yesf0SsNUS1weEEELkzWJDQ1rrFKXUcGAT6dNHv9NaH1JKTQGCtNbrgEXAUqVUBHCZ9GIhhBDCiix6jUBrHQAE3LHPL8t2AtDHkhmEEELkz2HuLBZCCJE7KQRCCOHgpBAIIYSDk0IghBAOTtnbbE2lVAxw5j7/eBXgUiHGsQdyzo5BztkxPMg519NaV83tgN0VggehlArSWvsYncOa5Jwdg5yzY7DUOcvQkBBCODgpBEII4eAcrRAsMDqAAeScHYOcs2OwyDk71DUCIYQQOTlaj0AIIcQdpBAIIYSDK5KFQCnVXSl1TCkVoZQan8vxEkqpnzKO/6mUcjcgZqEqwDmPVkodVkqFKaW2KKXqGZGzMN3tnLO0662U0kopu59qWJBzVkr1zfi7PqSUWmbtjIWtAD/bdZVS25RSwRk/3z2MyFlYlFLfKaUuKqXMeRxXSqmZGd+PMKVUqwd+U611kfqP9CWv/wIaAC5AKND0jjbDgHkZ2/2An4zObYVz7gyUytge6gjnnNGuLLAT2Af4GJ3bCn/PDYFgoGLG62pG57bCOS8AhmZsNwVOG537Ac+5I9AKMOdxvAewEVDAo8CfD/qeRbFH0AaI0Fqf1FonAcuBXne06QX8kLG9EuiqlFJWzFjY7nrOWuttWuv4jJf7SH9inD0ryN8zwIfAp0CCNcNZSEHOeSAwR2sdB6C1vmjljIWtIOesgXIZ2+WB81bMV+i01jtJfz5LXnoBS3S6fUAFpVTNB3nPolgIagORWV5HZezLtY3WOgW4ClS2SjrLKMg5Z2Ui/TcKe3bXc87oMtfRWm+wZjALKsjfcyOgkVJqj1Jqn1Kqu9XSWUZBznkS8E+lVBTpzz8ZYZ1ohrnXf+93ZRcPrxeFRyn1T8AH6GR0FktSShUDvgTeMDiKtTmTPjz0BOm9vp1KKS+t9RUjQ1lYf2Cx1voLpVQ70p966Km1TjM6mL0oij2Cc0CdLK/dMvbl2kYp5Ux6dzLWKuksoyDnjFLqSeB94DmtdaKVslnK3c65LOAJbFdKnSZ9LHWdnV8wLsjfcxSwTmudrLU+BRwnvTDYq4KcswlYAaC13gu4kr44W1FVoH/v96IoFoL9QEOlVH2llAvpF4PX3dFmHfB6xvZLwFadcRXGTt31nJVSLYH5pBcBex83hrucs9b6qta6itbaXWvtTvp1kee01kHGxC0UBfnZXkN6bwClVBXSh4pOWjFjYSvIOZ8FugIopZqQXghirJrSutYBr2XMHnoUuKq1jn6QL1jkhoa01ilKqeHAJtJnHHyntT6klJoCBGmt1wGLSO8+RpB+UaafcYkfXAHPeTpQBvg547r4Wa31c4aFfkAFPOcipYDnvAl4Sil1GEgFxmqt7ba3W8BzHgN8q5R6h/QLx2/Y8y92Sil/0ot5lYzrHv8GigNoreeRfh2kBxABxANvPvB72vH3SwghRCEoikNDQggh7oE
UAiGEcHBSCIQQwsFJIRBCCAcnhUAIIRycFAIhCkgplaqUCsnyn7tS6gml1NWM10eUUv/OaJt1/1Gl1OdG5xciL0XuPgIhLOiW1rpF1h0ZS5jv0lo/o5QqDYQopdZnHL69vyQQrJRarbXeY93IQtyd9AiEKCRa65vAAcDjjv23gBAecGEwISxFCoEQBVcyy7DQ6jsPKqUqk76m0aE79lckfb2fndaJKcS9kaEhIQoux9BQhseVUsFAGjAtYwmEJzL2h5JeBL7WWv9ttaRC3AMpBEI8uF1a62fy2q+Uqg/sU0qt0FqHWDmbEHclQ0NCWFjGctDTgHeNziJEbqQQCGEd84COGbOMhLApsvqoEEI4OOkRCCGEg5NCIIQQDk4KgRBCODgpBEII4eCkEAghhIOTQiCEEA5OCoEQQji4/weVUey99GLtmwAAAABJRU5ErkJggg==\n",
- "text/plain": [
- "