def __init__(self):
    super(DeepNet2, self).__init__()
    self.conv1 = nn.Conv2d( 1, 32, kernel_size=7, stride=4, padding=3)
-    self.conv2 = nn.Conv2d( 32, 128, kernel_size=5, padding=2)
-    self.conv3 = nn.Conv2d(128, 128, kernel_size=3, padding=1)
-    self.conv4 = nn.Conv2d(128, 128, kernel_size=3, padding=1)
-    self.conv5 = nn.Conv2d(128, 128, kernel_size=3, padding=1)
-    self.fc1 = nn.Linear(2048, 512)
+    self.conv2 = nn.Conv2d( 32, 256, kernel_size=5, padding=2)
+    self.conv3 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
+    self.conv4 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
+    self.conv5 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
+    self.fc1 = nn.Linear(4096, 512)
    self.fc2 = nn.Linear(512, 512)
    self.fc3 = nn.Linear(512, 2)
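
Note on the width change: doubling conv2 through conv5 from 128 to 256 channels also doubles the flattened feature vector entering fc1, hence the 2048 to 4096 update. Both sizes correspond to a 4x4 spatial map coming out of conv5 (128 * 4 * 4 = 2048, 256 * 4 * 4 = 4096). The forward pass is not part of this hunk, so the class below is only a sketch: the 1x64x64 input size, the two 2x2 max-poolings, and the ReLU placement are assumptions chosen to reproduce that 4x4 map, not the repository's actual forward.

import torch
import torch.nn as nn
import torch.nn.functional as F

class DeepNet2(nn.Module):
    def __init__(self):
        super(DeepNet2, self).__init__()
        # Post-change widths, as in the hunk above
        self.conv1 = nn.Conv2d( 1, 32, kernel_size=7, stride=4, padding=3)
        self.conv2 = nn.Conv2d( 32, 256, kernel_size=5, padding=2)
        self.conv3 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
        self.conv4 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
        self.conv5 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
        self.fc1 = nn.Linear(4096, 512)
        self.fc2 = nn.Linear(512, 512)
        self.fc3 = nn.Linear(512, 2)

    # Hypothetical forward, assuming a 1x64x64 input
    def forward(self, x):
        x = F.relu(self.conv1(x))                   # 32 x 16 x 16 (stride 4)
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)  # 256 x 8 x 8
        x = F.relu(self.conv3(x))                   # 256 x 8 x 8
        x = F.max_pool2d(F.relu(self.conv4(x)), 2)  # 256 x 4 x 4
        x = F.relu(self.conv5(x))                   # 256 x 4 x 4
        x = x.view(x.size(0), -1)                   # 256 * 4 * 4 = 4096
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        return self.fc3(x)

# Sanity check of the fc1 input size under the assumed resolution:
# DeepNet2()(torch.zeros(1, 1, 64, 64)).size()  -> torch.Size([1, 2])
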
-def train_model(model, train_set, validation_set):
+def train_model(model, model_filename, train_set, validation_set, nb_epochs_done = 0):
    acc_loss = 0.0
    for b in range(0, train_set.nb_batches):
        input, target = train_set.get_batch(b)
    log_string('train_loss {:d} {:f}'.format(e + 1, acc_loss),
               ' [ETA ' + time.ctime(time.time() + dt * (args.nb_epochs - e)) + ']')
    if validation_set is not None:
        nb_validation_errors = nb_errors(model, validation_set)
- train_model(model, train_set, validation_set)
- torch.save(model.state_dict(), model_filename)
+ train_model(model, model_filename, train_set, validation_set, nb_epochs_done = nb_epochs_done)
log_string('saved_model ' + model_filename)
nb_train_errors = nb_errors(model, train_set)
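
The train_model rework threads model_filename and nb_epochs_done through the trainer, and torch.save disappears from the call site, which suggests checkpointing now happens inside the epoch loop so an interrupted run can restart and skip the epochs already done. Only fragments of the body appear in the hunk, so the version below is a hedged reconstruction: the epoch loop bounds follow from nb_epochs_done and args.nb_epochs, while criterion and optimizer are stand-in choices, not taken from the source (log_string, nb_errors, and train_set.get_batch are).

import time
import torch

# Hedged reconstruction of the post-change train_model. criterion and
# optimizer are assumptions; log_string, nb_errors, train_set.get_batch,
# and args.nb_epochs come from the surrounding code.
def train_model(model, model_filename, train_set, validation_set, nb_epochs_done = 0):
    criterion = torch.nn.CrossEntropyLoss()                     # assumption
    optimizer = torch.optim.SGD(model.parameters(), lr = 1e-1)  # assumption

    for e in range(nb_epochs_done, args.nb_epochs):
        start_time = time.time()
        acc_loss = 0.0
        for b in range(0, train_set.nb_batches):
            input, target = train_set.get_batch(b)
            output = model(input)
            loss = criterion(output, target)
            acc_loss = acc_loss + loss.item()
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        dt = time.time() - start_time  # seconds per epoch, drives the ETA
        log_string('train_loss {:d} {:f}'.format(e + 1, acc_loss),
                   ' [ETA ' + time.ctime(time.time() + dt * (args.nb_epochs - e)) + ']')

        # Presumed new home of the save that left the call site: checkpoint
        # after every epoch so a restarted job loses at most one epoch.
        torch.save(model.state_dict(), model_filename)

        if validation_set is not None:
            nb_validation_errors = nb_errors(model, validation_set)

On restart, the caller would reload the checkpoint with model.load_state_dict(torch.load(model_filename)), set nb_epochs_done to the number of epochs already completed (for instance, recovered from the train_loss log), and invoke train_model exactly as in the hunk's new call.
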