Skip to content
Snippets Groups Projects
Commit 9004d0a8 authored by umlauf's avatar umlauf
Browse files

new Cross

parent 48ffa4bb
No related branches found
No related tags found
No related merge requests found
......@@ -105,7 +105,8 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
print("logits: ", logits) #print("logits: ", logits.size())
print("labels size: ", new_labels_batch.size())
loss_fct = CrossEntropyLoss()
loss_fct = SoftCrossEntropyLoss()
loss_2=loss_fct(logits.view(-1, 2).to("cuda"), new_labels_batch.view(-1).to("cuda"))
print("MixUp Loss: ", loss_2)
......@@ -122,7 +123,7 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
#print(outputs[0].size())
#progress_bar.update(1)
#print("one epoch done")
#print(model_name)
evaluation_test = evaluation.evaluate_model(model, name, test_dataset, learning_rate, test_batch_size)
evaluation_train = evaluation.evaluate_model(model, name, train_dataset, learning_rate, test_batch_size)
......@@ -132,7 +133,9 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
return evaluation_test, evaluation_train
def SoftCrossEntropyLoss(input, target, dim=1):
    """Cross-entropy loss that accepts soft (probability-distribution) targets.

    Unlike ``torch.nn.CrossEntropyLoss`` with class-index targets, this takes a
    full target distribution per sample — e.g. one-hot labels or
    mixup-interpolated labels, as used by the surrounding training code.

    Args:
        input: Tensor of raw (unnormalized) logits; presumably shaped
            ``(batch, num_classes)`` given the default ``dim=1`` — confirm
            against callers.
        target: Tensor of target probabilities, broadcastable against
            ``input`` (same shape in the one-hot / mixup case).
        dim: Dimension along which the softmax is taken. Defaults to 1,
            preserving the original hard-coded behavior.

    Returns:
        Scalar tensor: ``-(target * log_softmax(input)).sum()`` divided by the
        batch size ``input.shape[0]`` (i.e. mean per-sample cross entropy).
    """
    # log_softmax is numerically stabler than log(softmax(x)).
    logprobs = torch.nn.functional.log_softmax(input, dim=dim)
    # Sum over every element, then normalize by batch size only.
    return -(target * logprobs).sum() / input.shape[0]
def mixup_function(batch_of_matrices, batch_of_labels, l, t):
runs = math.floor(batch_of_matrices.size()[0]/2)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment