Commit 14d30ff5 authored by Richard Vogl's avatar Richard Vogl
Browse files

model changes

parent 5f01f82d
......@@ -113,17 +113,19 @@ def run(model, model_name, learn_rate, batch_size, split, k_samples):
valid_loss[epoch] = valid_loss_sum / valid_batches
# Then we print the results for this epoch:
print("Epoch %3d of %d took %1.3f s (valid: %1.3f s) -- patience: %d" %
print("\rEpoch %3d of %d took %1.3f s (valid: %1.3f s) -- patience: %d " %
(epoch + 1, MAX_NUMEPOCHS, time.time() - start_time, time.time() - valid_start_time, cur_patience))
error = train_loss_sum / train_batches
print(" training loss: %1.3f valid loss: %1.3f" % (error, valid_loss[epoch]))
if epoch == 0 or valid_loss[epoch] < valid_loss[best_valid_loss_epoch]:
better = valid_loss[epoch] < valid_loss[best_valid_loss_epoch]
if epoch == 0 or better:
best_valid_loss_epoch = epoch
np.savez(os.path.join(out_directory, 'best_model.npz'), *lasagne.layers.get_all_param_values(network))
print(' new best validation loss at epoch %3d: %1.3f' % (epoch, valid_loss[epoch]))
np.save(os.path.join(out_directory, 'losses.npy'), [train_loss[:epoch], valid_loss[:epoch]])
if epoch > 0 and valid_loss[epoch] >= valid_loss[best_valid_loss_epoch]:
if epoch > 0 and not better:
cur_patience -= 1
if cur_patience <= 0:
if refinements > 0:
......@@ -139,7 +141,7 @@ def run(model, model_name, learn_rate, batch_size, split, k_samples):
cur_patience = patience
# Optionally, you could now dump the network weights to a file like this:
np.savez(os.path.join(out_directory, 'best_model.npz'), *lasagne.layers.get_all_param_values(network))
#
# And load them again later on like this:
# with np.load('model.npz') as f:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment