Commit a54119c7 authored by Verena Praher's avatar Verena Praher
Browse files

Rename validation/test metric keys for clarity

parent deeb26d5
......@@ -118,9 +118,18 @@ class CNN(pl.LightningModule):
# _, _, fscore, _ = metrics.precision_recall_fscore_support(y.t().cpu(), y_hat.t().cpu())
fscore = 0.
return {'val_loss': self.my_loss(y_hat, y),
'rocauc':rocauc,
'prauc':prauc,
'fscore':fscore}
'val_rocauc':rocauc,
'val_prauc':prauc,
'val_fscore':fscore}
def test_step(self, data_batch, batch_nb):
    """Run a single test batch via the shared per-batch helper.

    Bug fix: the original delegated to ``test_end(data_batch, batch_nb)``,
    which is the epoch-end *aggregator* (it expects a list of per-batch
    result dicts, not a batch). The per-batch helper imported from
    ``models.shared_stuff`` is ``test_step(model, data_batch, batch_nb)``.
    """
    # Bare `test_step` here resolves to the module-level helper, not this method.
    return test_step(self, data_batch, batch_nb)
def test_end(self, outputs):
    """Aggregate per-batch test outputs, log the result, and return it.

    Delegates the averaging to the module-level ``test_end`` helper from
    ``models.shared_stuff``; the aggregated dict is also pushed to the
    experiment logger.
    """
    # Bare `test_end` resolves to the imported module-level aggregator.
    aggregated = test_end(outputs)
    self.experiment.log(aggregated)
    return aggregated
def validation_end(self, outputs):
    """Aggregate per-batch validation outputs via the shared helper."""
    # Bare `validation_end` resolves to the imported module-level aggregator.
    aggregated = validation_end(outputs)
    return aggregated
......
......@@ -3,7 +3,7 @@ import torch.nn as nn
import torch.nn.functional as F
import pytorch_lightning as pl
from models.shared_stuff import tng_dataloader, val_dataloader, test_dataloader, \
validation_end, training_step, validation_step
validation_end, training_step, validation_step, test_step, test_end
from sklearn.metrics import roc_auc_score
......
......@@ -31,22 +31,49 @@ def validation_step(model, data_batch, batch_nb):
# _, _, fscore, _ = metrics.precision_recall_fscore_support(y.t().cpu(), y_hat.t().cpu())
fscore = 0.
return {'val_loss': model.my_loss(y_hat, y),
'rocauc': rocauc,
'prauc': prauc,
'fscore': fscore}
'val_rocauc': rocauc,
'val_prauc': prauc,
'val_fscore': fscore}
def validation_end(outputs):
    """Average the per-batch validation metrics over an epoch.

    Fix: this block contained interleaved pre-rename diff residue that read
    the removed keys ``'rocauc'``/``'prauc'``/``'fscore'`` — those lookups
    would raise ``KeyError`` against the renamed per-batch outputs, and the
    returned dict carried duplicate keys. Only the renamed version is kept.

    :param outputs: list of per-batch dicts produced by ``validation_step``,
        each with keys ``val_loss`` (tensor), ``val_rocauc``, ``val_prauc``,
        ``val_fscore`` (scalars).
    :return: dict of epoch-mean metrics under the same ``val_*`` keys.
    """
    avg_loss = torch.stack([x['val_loss'] for x in outputs]).mean()
    # Scalars are wrapped in 1-element tensors so they can be stacked.
    avg_auc = torch.stack([torch.tensor([x['val_rocauc']]) for x in outputs]).mean()
    avg_prauc = torch.stack([torch.tensor([x['val_prauc']]) for x in outputs]).mean()
    avg_fscore = torch.stack([torch.tensor([x['val_fscore']]) for x in outputs]).mean()
    return {'val_loss': avg_loss,
            'val_rocauc': avg_auc,
            'val_prauc': avg_prauc,
            'val_fscore': avg_fscore}
def test_step(model, data_batch, batch_nb):
    """Compute test loss and metrics for one batch.

    :param model: module providing ``forward`` and ``my_loss``.
    :param data_batch: 3-tuple ``(x, _, y)``; the middle element is unused.
    :param batch_nb: batch index (unused here).
    :return: dict with ``test_loss`` (tensor) and scalar ``test_rocauc``,
        ``test_prauc``, ``test_fscore``.
    """
    x, _, y = data_batch
    y_hat = model.forward(x).float()
    y = y.float()
    # sklearn metrics operate on CPU arrays; transpose matches the
    # convention used elsewhere in this module.
    targets = y.t().cpu()
    preds = y_hat.t().cpu()
    rocauc = metrics.roc_auc_score(targets, preds)
    prauc = metrics.average_precision_score(targets, preds)
    # F-score computation is currently disabled; a constant placeholder is returned.
    fscore = 0.
    return {'test_loss': model.my_loss(y_hat, y),
            'test_rocauc': rocauc,
            'test_prauc': prauc,
            'test_fscore': fscore}
def test_end(outputs):
avg_loss = torch.stack([x['test_loss'] for x in outputs]).mean()
avg_auc = torch.stack([torch.tensor([x['test_rocauc']]) for x in outputs]).mean()
avg_prauc = torch.stack([torch.tensor([x['test_prauc']]) for x in outputs]).mean()
avg_fscore = torch.stack([torch.tensor([x['test_fscore']]) for x in outputs]).mean()
return {'test_loss': avg_loss,
'test_rocauc': avg_auc,
'test_prauc': avg_prauc,
'test_fscore': avg_fscore}
def tng_dataloader():
train_csv = os.path.join(PATH_ANNOTATIONS, 'train_processed.tsv')
cache_x_name = "_ap_mtgjamendo44k"
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment