From b087892285dbe2889b1d9bd9844ef17443604e09 Mon Sep 17 00:00:00 2001
From: Pierre Nodet
Date: Wed, 16 Jun 2021 10:14:51 +0200
Subject: [PATCH] Fix issue #654 (#655)

Avoid two softmax calls in LogisticRegression

Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
---
 pl_bolts/models/regression/logistic_regression.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pl_bolts/models/regression/logistic_regression.py b/pl_bolts/models/regression/logistic_regression.py
index 21178f5e17..f283c8182b 100644
--- a/pl_bolts/models/regression/logistic_regression.py
+++ b/pl_bolts/models/regression/logistic_regression.py
@@ -53,7 +53,7 @@ def training_step(self, batch, batch_idx):
         # flatten any input
         x = x.view(x.size(0), -1)
 
-        y_hat = self(x)
+        y_hat = self.linear(x)
 
         # PyTorch cross_entropy function combines log_softmax and nll_loss in single function
         loss = F.cross_entropy(y_hat, y, reduction='sum')