forked from 626_privacy/tensorflow_privacy
Fix usage of logging API.
PiperOrigin-RevId: 463123944
This commit is contained in:
parent 4cb0a11c4b
commit d16f020329
5 changed files with 5 additions and 10 deletions
@@ -92,8 +92,7 @@ def load_cifar10():


 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.ERROR)
+  logging.set_verbosity(logging.ERROR)

   # Load training and test data.
   x_train, y_train, x_test, y_test = load_cifar10()
@@ -163,8 +163,7 @@ def compute_epsilon(steps):


 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.INFO)
+  logging.set_verbosity(logging.INFO)

   if FLAGS.batch_size % FLAGS.microbatches != 0:
     raise ValueError('Number of microbatches should divide evenly batch_size')
@@ -146,8 +146,7 @@ def load_mnist():


 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.INFO)
+  logging.set_verbosity(logging.INFO)

   if FLAGS.dpsgd and FLAGS.batch_size % FLAGS.microbatches != 0:
     raise ValueError('Number of microbatches should divide evenly batch_size')
@@ -183,8 +183,7 @@ def print_privacy_guarantees(epochs, batch_size, samples, noise_multiplier):


 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.INFO)
+  logging.set_verbosity(logging.INFO)

   if FLAGS.data_l2_norm <= 0:
     raise ValueError('data_l2_norm must be positive.')
@@ -89,8 +89,7 @@ def load_mnist():


 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.INFO)
+  logging.set_verbosity(logging.INFO)

   # Load training and test data.
   train_data, train_labels, test_data, test_labels = load_mnist()
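Each of the five hunks makes the same substitution: the two-line TensorFlow logger setup is replaced by a single set_verbosity call. Python's standard Logger (which tf.get_logger() returns) exposes setLevel rather than set_level, so the removed code would likely have raised an AttributeError, which appears to be the logging-API misuse the commit title refers to. A minimal sketch of the new pattern follows; it assumes logging refers to absl.logging (imported as `from absl import logging`) and that the script is launched through absl's app.run, since the hunks themselves do not show the imports.

from absl import app
from absl import logging


def main(unused_argv):
  # Previously: logger = tf.get_logger(); logger.set_level(logging.INFO)
  # The fix sets verbosity directly through the absl logging API instead.
  logging.set_verbosity(logging.INFO)
  logging.info('absl logging verbosity set to INFO.')


if __name__ == '__main__':
  app.run(main)

tf.get_logger() could still be configured separately, but its level would have to be set with setLevel; routing the tutorials through set_verbosity keeps them on a single logging API.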