From eb93f3a7b09a43d6404ca23eaf62eee2e96e59b1 Mon Sep 17 00:00:00 2001 From: Francois Fleuret Date: Thu, 15 Jun 2017 14:32:43 +0200 Subject: [PATCH] Replaced SGD with Adam, made the learning rate 1e-1 again. --- cnn-svrt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cnn-svrt.py b/cnn-svrt.py index 35c664f..ad73f0c 100755 --- a/cnn-svrt.py +++ b/cnn-svrt.py @@ -65,7 +65,7 @@ args = parser.parse_args() log_file = open(args.log_file, 'w') -print('Logging into ' + args.log_file) +print(Fore.RED + 'Logging into ' + args.log_file + Style.RESET_ALL) def log_string(s): s = Fore.GREEN + time.ctime() + Style.RESET_ALL + ' ' + \ @@ -112,7 +112,7 @@ def train_model(train_input, train_target): model.cuda() criterion.cuda() - optimizer, bs = optim.SGD(model.parameters(), lr = 1e-2), 100 + optimizer, bs = optim.Adam(model.parameters(), lr = 1e-1), 100 for k in range(0, args.nb_epochs): acc_loss = 0.0 -- 2.39.5