Commit 2d371c1a authored by umlauf

back to softmax

parent b0e532cb
@@ -143,7 +143,7 @@ def cross_entropy(logits, target):
     logprobs = torch.nn.functional.log_softmax(logits, dim=1)
     return -torch.mean(torch.sum(target * logprobs, dim=1))
-#without softmax
+#without softmax -> gives all values 0.0 (acc. around 70)
 # def cross_entropy(logits, target):
 # log_probs = -torch.log(torch.exp(logits) / torch.sum(torch.exp(logits), dim=1, keepdim=True))
 # loss = -torch.mean(torch.sum(target * log_probs, dim=1))
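The commented-out "without softmax" variant has two evident problems. First, log_probs there already carries a leading minus, so it holds positive negative-log-probabilities, and the second minus in loss negates the result again, returning the sign-flipped loss. Second, computing torch.exp(logits) directly overflows to inf for large logits, and inf/inf yields nan, whereas torch.nn.functional.log_softmax is evaluated in a numerically stable way. A minimal standalone sketch of both failure modes (the function names and test tensors below are illustrative, not from this repository):

    import torch

    def cross_entropy_naive(logits, target):
        # As in the commented-out version: log_probs is already -log(p),
        # so the second minus below flips the sign of the final loss.
        log_probs = -torch.log(torch.exp(logits) / torch.sum(torch.exp(logits), dim=1, keepdim=True))
        return -torch.mean(torch.sum(target * log_probs, dim=1))

    def cross_entropy_stable(logits, target):
        # log_softmax is computed stably (max subtraction), so exp() never overflows.
        logprobs = torch.nn.functional.log_softmax(logits, dim=1)
        return -torch.mean(torch.sum(target * logprobs, dim=1))

    # Small logits: both run, but the naive version returns the negated loss.
    logits = torch.tensor([[2.0, 1.0, 0.1]])
    target = torch.tensor([[1.0, 0.0, 0.0]])  # one-hot label
    print(cross_entropy_naive(logits, target))   # approx. -0.4170 (wrong sign)
    print(cross_entropy_stable(logits, target))  # approx.  0.4170

    # Large logits: exp(1000) overflows to inf, and inf/inf gives nan.
    big = torch.tensor([[1000.0, 0.0, 0.0]])
    print(cross_entropy_naive(big, target))      # nan
    print(cross_entropy_stable(big, target))     # 0.0 (the true class dominates)

Which of these produced the all-zero values mentioned in the diff comment is not clear from the hunk alone, but either one would explain a degenerate training signal, and switching back to log_softmax avoids both.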