Skip to content
Snippets Groups Projects
Commit 3b333af5 authored by umlauf's avatar umlauf
Browse files

CEL

parent 68d76e5f
No related branches found
No related tags found
No related merge requests found
......@@ -109,7 +109,7 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
# Debug: log the shape of the mixed-up label batch before computing the loss.
print("labels shape: ", list(new_labels_batch.shape))
# Flatten logits to (N, 2) class scores for binary cross-entropy.
preds = logits.view(-1, 2).to("cuda")
# Hard/interpolated label targets, flattened to match preds' first dimension.
# NOTE(review): an earlier version wrapped this in softmax(Variable(...)) and
# was immediately overwritten — removed as dead code (torch.autograd.Variable
# is deprecated anyway; tensors carry autograd state directly).
target = new_labels_batch.view(-1).to("cuda")
# assumes cross_entropy is defined/imported elsewhere in this file — TODO confirm
loss_2 = cross_entropy(preds, target)
print("MixUp Loss: ", loss_2)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment