parser.add_argument('--synthesis_sampling',
action='store_true', default = True)
+parser.add_argument('--no_checkpoint',
+ action='store_true', default = False, help = 'do not try to load a checkpoint')
+
parser.add_argument('--checkpoint_name',
type = str, default = 'checkpoint.pth')
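# Usage note (not part of this change, script name assumed): running e.g.
#   python main.py --no_checkpoint
# ignores an existing checkpoint.pth and trains from scratch, while the
# default behaviour below resumes from the checkpoint when the file exists.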
nb_epochs_finished = 0
-try:
-    checkpoint = torch.load(args.checkpoint_name, map_location = device)
-    nb_epochs_finished = checkpoint['nb_epochs_finished']
-    model.load_state_dict(checkpoint['model_state'])
-    optimizer.load_state_dict(checkpoint['optimizer_state'])
-    print(f'Checkpoint loaded with {nb_epochs_finished} epochs finished.')
+if args.no_checkpoint:
+    log_string('Not trying to load a checkpoint.')
-except FileNotFoundError:
-    print('Starting from scratch.')
-
-except:
-    print('Error when loading the checkpoint.')
-    exit(1)
+else:
+    try:
+        checkpoint = torch.load(args.checkpoint_name, map_location = device)
+        nb_epochs_finished = checkpoint['nb_epochs_finished']
+        model.load_state_dict(checkpoint['model_state'])
+        optimizer.load_state_dict(checkpoint['optimizer_state'])
+        log_string(f'Checkpoint loaded with {nb_epochs_finished} epochs finished.')
+
+    except FileNotFoundError:
+        log_string('Starting from scratch.')
+
+    except Exception as e:
+        log_string(f'Error when loading the checkpoint ({e}).')
+        exit(1)
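# Not part of this change: a minimal sketch of the saving side that the
# loader above assumes, i.e. a dict with the keys 'nb_epochs_finished',
# 'model_state' and 'optimizer_state' written with torch.save after each
# epoch (args.nb_epochs and the per-epoch training step are assumptions):
#
#   for k in range(nb_epochs_finished, args.nb_epochs):
#       # ... one epoch of training on model with optimizer ...
#       torch.save({'nb_epochs_finished': k + 1,
#                   'model_state': model.state_dict(),
#                   'optimizer_state': optimizer.state_dict()},
#                  args.checkpoint_name)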
######################################################################