Commit 5ab1825a authored by umlauf

crossentropy

parent 5d13fc81
@@ -103,10 +103,10 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
#print("span output: ", span_output)
logits=model.classifier(span_output.detach()) #target_value?
# print("logits: ", logits)
# print("logits shape: ", list(logits.shape))
print("logits: ", logits)
print("logits shape: ", list(logits.shape))
print("Newlabels: ", new_labels_batch)
# print("labels shape: ", list(new_labels_batch.shape))
print("labels shape: ", list(new_labels_batch.shape))
logits = logits.view(-1, 2).to("cuda")
target = new_labels_batch.view(-1).to("cuda")
@@ -137,22 +137,14 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
return evaluation_test, evaluation_train
#PyTorch forum suggestion: try with dim=1 for the targets; changed from /logits.shape[0] to .mean()
# def cross_entropy(logits, target):
# print("Cross Entropy Target: ", target)
# logprobs = torch.nn.functional.log_softmax (logits, dim = 1)
# return -torch.mean(torch.sum(target * logprobs, dim=1))
# L(y, y') = -1/N * Σ_i Σ_c y'_{i,c} * log(softmax(logits_i)_c)
def cross_entropy(logits, target):
log_softmax = torch.nn.functional.log_softmax(logits, dim=1)
loss = -torch.sum(target * log_softmax, dim=1)
return loss.mean()
#without softmax -> gives all values 0.0 (accuracy around 70)
# def cross_entropy(logits, target):
# log_probs = -torch.log(torch.exp(logits) / torch.sum(torch.exp(logits), dim=1, keepdim=True))
# loss = -torch.mean(torch.sum(target * log_probs, dim=1))
# return loss
#def cross_entropy(logits, target):
def mixup_function(batch_of_matrices, batch_of_labels, l, t):
runs = math.floor(batch_of_matrices.size()[0]/2)
......
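As a quick sanity check of the soft-target cross entropy introduced in this commit, here is a minimal sketch (not part of the commit itself): the batch size, class count, label values, and the lambda of 0.7 are made-up illustration values. With hard one-hot targets the function reduces to `torch.nn.functional.cross_entropy`; with mixup-style mixed labels it weights the per-class log-probabilities by the target distribution, matching the formula in the comment above.

```python
import torch
import torch.nn.functional as F

def cross_entropy(logits, target):
    # target holds per-example class probabilities (e.g. mixed mixup labels), not class indices
    log_softmax = F.log_softmax(logits, dim=1)
    loss = -torch.sum(target * log_softmax, dim=1)
    return loss.mean()

# hypothetical shapes: batch of 4 spans, 2 classes, as in logits.view(-1, 2) above
logits = torch.randn(4, 2)
labels = torch.tensor([0, 1, 1, 0])

# hard one-hot targets reproduce the standard cross entropy
hard = F.one_hot(labels, num_classes=2).float()
print(torch.allclose(cross_entropy(logits, hard), F.cross_entropy(logits, labels)))  # True

# mixup-style soft targets with an illustrative lambda of 0.7
l = 0.7
soft = l * hard + (1 - l) * hard.flip(0)
print(cross_entropy(logits, soft))
```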