Commit 0d2c572d authored by umlauf

Check Lamda value

parent b108b149
@@ -112,7 +112,7 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
     print("logits: ", logits)
     target = new_labels_batch.view(-1).to("cuda")
     print("Newlabels: ", new_labels_batch)
-    loss_2 = cross_entropy(logits, target)
+    loss_2 = cross_entropy(logits, target, lambda_value)
     #loss_2 = SoftCrossEntropyLoss(logits.view(-1, 2).to("cuda"), new_labels_batch.view(-1).to("cuda"))
     #loss_2 = torch.nn.functional.cross_entropy(preds, target.long())
     print("MixUp Loss: ", loss_2)
@@ -142,7 +142,7 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
 #log base e
 #Fkt vom Meeting
 def cross_entropy(logits, target, l):
-    print("Lambda:", l)
+    print("Lamda Value:", l)
     #makes the logits in log (base) probabilities
     logprobs = torch.nn.functional.log_softmax(logits, dim=1)
     value = target.item() #gets Item (0. or 1.)
@@ -154,21 +154,6 @@ def cross_entropy(logits, target, l):
     else:
         print("Mixed Case")
-#kann weg
-# def cross_entropy(logits, target):
-#     log_q = torch.log_softmax(logits, dim=1)
-#     return -torch.sum(log_q[range(log_q.shape[0]), target])
-# def cross_entropy(logits, target):
-#     # Calculate log_q
-#     log_q = torch.log_softmax(logits, dim=1)
-#     #define classes/options
-#     target_class = (target == 0).float()
-#     target_class2 = (target == 1).float()
-#     target = target.float()
-#     # calculate sum of losses of batch size
-#     return -(target_class * log_q[:, 0] + target_class2 * log_q[:, 1] + (1 - target_class - target_class2) * (target * log_q[:, 1] + (1 - target) * log_q[:, 0]))
 def mixup_function(batch_of_matrices, batch_of_labels, l, t):
     runs = math.floor(batch_of_matrices.size()[0]/2)
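For context, a minimal sketch of a lambda-weighted soft cross-entropy that fits the (logits, target, l) signature above. The pure-0./1. branch and the "Mixed Case" branch of the repository's function are only partially visible in this diff, so the reduction below is an assumption; the name soft_cross_entropy_sketch is hypothetical, and the formula mirrors the removed commented-out code:

import torch
import torch.nn.functional as F

def soft_cross_entropy_sketch(logits, target, l):
    # Assumption: target holds mixed binary labels l*y_a + (1 - l)*y_b,
    # i.e. values in {0.0, 1.0, l, 1 - l}. l is passed along mainly so the
    # mixing coefficient can be checked/logged, as the commit title suggests.
    print("lambda used for this batch:", l)
    logprobs = F.log_softmax(logits, dim=1)   # log probabilities (natural log)
    p1 = target.float().view(-1)              # soft weight on class 1
    p0 = 1.0 - p1                             # soft weight on class 0
    # For a pure 0./1. target this is ordinary negative log-likelihood; for a
    # mixed target it equals l * CE(y_a) + (1 - l) * CE(y_b).
    return -(p0 * logprobs[:, 0] + p1 * logprobs[:, 1]).sum()

# Usage with dummy tensors:
logits = torch.randn(4, 2)
target = torch.tensor([0.0, 1.0, 0.7, 0.3])
print(soft_cross_entropy_sketch(logits, target, 0.7))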