Commit fc869575 authored by umlauf

loss with epochs

parent 5fde4e94
@@ -3,7 +3,7 @@
 import matplotlib.pyplot as plt
 import re
-with open("loss.txt", "r") as f:
+with open("output_baselines_loss.txt", "r") as f:
     loss_file = f.read()
 pattern = r"\((\d+\.\d+)"
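For context, a minimal, self-contained sketch of what this plotting script appears to do after the rename. The file name and the regex come from the diff; the assumed log format (loss values printed as tuples such as "(0.1234, ...)"), the axis labels, and the output file loss_curve.png are illustrative assumptions, not part of the commit.

import re
import matplotlib.pyplot as plt

with open("output_baselines_loss.txt", "r") as f:
    loss_file = f.read()

# Capture the first float after an opening parenthesis, e.g. "(0.1234" (assumed log format).
pattern = r"\((\d+\.\d+)"
losses = [float(value) for value in re.findall(pattern, loss_file)]

plt.plot(losses)
plt.xlabel("training step")
plt.ylabel("loss")
plt.savefig("loss_curve.png")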
@@ -46,6 +46,7 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
     for epoch in range(num_epochs):
         #for param_tensor in model.state_dict():
         # print(param_tensor, "\t", model.state_dict()[param_tensor])
+        print("Epoch:", epoch)
         index=0
         for batch in train_dataloader:
             if name[0] == "b":
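The only functional change in this hunk is the epoch log line. A self-contained sketch of the loop shape it sits in, with num_epochs and train_dataloader (names taken from the diff) replaced by toy placeholders so it runs on its own:

num_epochs = 3                       # placeholder value for illustration
train_dataloader = [[0, 1], [2, 3]]  # placeholder batches

for epoch in range(num_epochs):
    print("Epoch:", epoch)           # log the current epoch, as added above
    index = 0
    for batch in train_dataloader:
        index += 1                   # forward pass, loss, and backward step would go here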
@@ -110,7 +111,7 @@ def train(model, name, seed,gradient_accumulation_steps,mixup, threshold, lambda
             #add mira 1 line
             #new_labels_batch = new_labels_batch.to(torch.float64)
-            loss_2=loss_fct(logits.view(-1, 2).to("cuda"), new_labels_batch.view(-1).to("cuda")).to(torch.float32)
+            loss_2=loss_fct(logits.view(-1, 2).to("cuda"), new_labels_batch.view(-1).to("cuda"))#.to(torch.float32)
             print("MixUp Loss: ", loss_2)
             #update entire model
             loss_2.backward()
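A runnable sketch of the changed line, under the assumptions that loss_fct is torch.nn.CrossEntropyLoss and new_labels_batch holds hard class indices; the .to("cuda") moves are dropped so the sketch runs on CPU. With float32 logits the loss already comes back as a float32 scalar, which is why the trailing .to(torch.float32) cast can be commented out without changing the result.

import torch
import torch.nn as nn

loss_fct = nn.CrossEntropyLoss()

# Stand-ins for the model output and the mixup labels (hypothetical batch of 4, 2 classes).
logits = torch.randn(4, 2, requires_grad=True)
new_labels_batch = torch.randint(0, 2, (4,))

loss_2 = loss_fct(logits.view(-1, 2), new_labels_batch.view(-1))
print("MixUp Loss: ", loss_2)
loss_2.backward()  # gradients flow back through the logits, i.e. through the entire model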