Wres: epsilon in args
parent aa190cd4f1
commit 7208c16efc
1 changed file with 10 additions and 9 deletions
@@ -12,6 +12,8 @@ from tqdm import tqdm
 import opacus
 from opacus.validators import ModuleValidator
 from opacus.utils.batch_memory_manager import BatchMemoryManager
+import warnings
+warnings.filterwarnings("ignore")


 def set_seed(seed=42):
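Note: the two added lines silence every Python warning for the whole process; Opacus and its validators emit a number of UserWarnings during setup, which is presumably what this targets. If only that noise should be hidden, a narrower filter is possible; a minimal sketch (the "opacus" module pattern is an assumption, not taken from this repo):

import warnings

# Hedged alternative to the blanket filter added in this hunk: ignore only
# UserWarnings raised from opacus modules instead of every warning.
warnings.filterwarnings("ignore", category=UserWarning, module="opacus")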
@@ -76,18 +78,15 @@ def train_no_cap(net, epochs, data_loader, device, optimizer, criterion, schedul
     return best_test_set_accuracy


-def _train_seed(net, loaders, device, dataset, log=False, logfile='', epochs=200, norm=1.0):
+def _train_seed(net, loaders, device, dataset, log=False, logfile='', epochs=200, norm=1.0, dp_epsilon=None):
     train_loader, test_loader = loaders

-    dp_epsilon = None
     dp_delta = 1e-5
     checkpointFile = 'wrn-{}-{}e-{}d-{}n-dict.pt'.format(int(time.time()), dp_epsilon, dp_delta, norm)

-    if dp_epsilon is not None:
-        print(f"DP epsilon = {dp_epsilon}, delta = {dp_delta}")
-        #net = ModuleValidator.fix(net, replace_bn_with_in=True)
-        net = ModuleValidator.fix(net)
-        ModuleValidator.validate(net, strict=True)
+    #net = ModuleValidator.fix(net, replace_bn_with_in=True)
+    net = ModuleValidator.fix(net)
+    ModuleValidator.validate(net, strict=True)

     criterion = nn.CrossEntropyLoss()
     optimizer = optim.SGD(net.parameters(), lr=0.1, momentum=0.9, nesterov=True, weight_decay=5e-4)
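Note: with dp_epsilon now a parameter, the ModuleValidator calls run unconditionally. Opacus cannot compute per-sample gradients through layers such as BatchNorm, so ModuleValidator.fix swaps them for DP-compatible replacements (GroupNorm by default) and validate(strict=True) raises if anything unsupported remains. A minimal, hedged sketch of that flow on a toy model (a stand-in, not the repo's WideResNet):

import torch.nn as nn
from opacus.validators import ModuleValidator

# Toy stand-in for the WideResNet ("wrn") trained by this script.
net = nn.Sequential(
    nn.Conv2d(3, 16, 3, padding=1),
    nn.BatchNorm2d(16),   # mixes statistics across samples, so not DP-SGD compatible
    nn.ReLU(),
    nn.AdaptiveAvgPool2d(1),
    nn.Flatten(),
    nn.Linear(16, 10),
)

print(ModuleValidator.validate(net, strict=False))  # lists the offending modules
net = ModuleValidator.fix(net)                      # e.g. BatchNorm2d -> GroupNorm
ModuleValidator.validate(net, strict=True)          # raises if anything is still unsupported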
@@ -105,7 +104,8 @@ def _train_seed(net, loaders, device, dataset, log=False, logfile='', epochs=200
             max_grad_norm=norm,
         )

-        print(f"Using sigma={optimizer.noise_multiplier} and C={1.0}, norm = {norm}")
+        print(f"DP epsilon = {dp_epsilon}, delta = {dp_delta}")
+        print(f"Using sigma={optimizer.noise_multiplier} and C = norm = {norm}")
     else:
         print("Training without differential privacy")

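Note: the max_grad_norm=norm context line and the optimizer.noise_multiplier read-back are consistent with Opacus's PrivacyEngine.make_private_with_epsilon, with the new dp_epsilon argument supplying target_epsilon. The actual call sits just above this hunk and is not shown, so the sketch below is an assumption rather than the repo's code; a toy model and dataset stand in for the WideResNet and its loaders.

import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset
from opacus import PrivacyEngine

# Hypothetical values standing in for what the script would pass.
dp_epsilon, dp_delta, norm, epochs = 8.0, 1e-5, 1.0, 3

net = nn.Linear(8, 2)
optimizer = optim.SGD(net.parameters(), lr=0.1, momentum=0.9, nesterov=True, weight_decay=5e-4)
train_loader = DataLoader(TensorDataset(torch.randn(64, 8), torch.randint(0, 2, (64,))), batch_size=16)

privacy_engine = PrivacyEngine()
net, optimizer, train_loader = privacy_engine.make_private_with_epsilon(
    module=net,
    optimizer=optimizer,
    data_loader=train_loader,
    epochs=epochs,
    target_epsilon=dp_epsilon,   # now supplied via the --epsilon flag
    target_delta=dp_delta,       # hard-coded to 1e-5 in this file
    max_grad_norm=norm,          # per-sample gradient clipping bound C
)
# Opacus solves for the noise multiplier sigma that meets (epsilon, delta)
# over the planned number of steps, hence the informational print below.
print(f"Using sigma={optimizer.noise_multiplier} and C = norm = {norm}")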
@@ -166,7 +166,7 @@ def train(args):
     net = net.to(device)

     epochs = training_configurations.epochs
-    best_test_set_accuracy = _train_seed(net, loaders, device, dataset, log, logfile, epochs, args.norm)
+    best_test_set_accuracy = _train_seed(net, loaders, device, dataset, log, logfile, epochs, args.norm, args.epsilon)

     if log:
         with open(logfile, 'a') as temp:
@@ -192,6 +192,7 @@ if __name__ == '__main__':
     parser.add_argument('-config', '--config', help='Training Configurations', required=True)
     parser.add_argument('--norm', type=float, help='dpsgd norm clip factor', required=True)
     parser.add_argument('--cuda', type=int, help='gpu index', required=False)
+    parser.add_argument('--epsilon', type=float, help='dp epsilon', required=False, default=None)

     args = parser.parse_args()

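Note: with the flag in place, the privacy budget is chosen per run instead of being hard-coded inside _train_seed. A hypothetical invocation (the script's filename and config path are not part of this diff):

python train.py --config configs/wrn.json --norm 1.0 --cuda 0 --epsilon 8.0

Omitting --epsilon leaves args.epsilon at its default of None, so _train_seed takes the else branch and trains without differential privacy.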