diff --git a/src/experiments/NEC_evaluation/evaluation.py b/src/experiments/NEC_evaluation/evaluation.py
index ea7b084fafd6b19e1d1c376b6d6254ac533db5f0..4befd7f7fccafd6912db686b87d911f1d1760590 100644
--- a/src/experiments/NEC_evaluation/evaluation.py
+++ b/src/experiments/NEC_evaluation/evaluation.py
@@ -108,5 +108,5 @@ def read_NEC_metrics(directory):
         print(f"Model: {model}, Dataset: {dataset}, Accuracy: {avg_accuracy:.2f}%")
 
 
-#run_NEC_tests_all()
+# run_NEC_tests_all()
 read_NEC_metrics("results")
diff --git a/src/experiments/finetune_T5/plotting/plot_loss.py b/src/experiments/finetune_T5/plotting/plot_loss.py
index 57c365b33cdb456f228e50df069cc28ec9f94c03..75ab038a5b48b62846e1f69b27d8f6944716453b 100644
--- a/src/experiments/finetune_T5/plotting/plot_loss.py
+++ b/src/experiments/finetune_T5/plotting/plot_loss.py
@@ -15,7 +15,7 @@ def plot_loss_curve(logfile, title):
                 eval_losses.append(float(match.group(1)))
 
     plt.figure(figsize=(10,5))
-    plt.plot(eval_losses)
+    plt.plot(eval_losses, label="Eval Loss")
 
     plt.xlabel("Epoch")
     plt.ylabel("Eval Loss")
@@ -23,7 +23,7 @@ def plot_loss_curve(logfile, title):
     plt.legend()
     plt.grid(True)
 
-    plt.savefig(f"eval_loss_{os.path.basename(logfile)}.pdf")
+    plt.savefig(f"eval_loss_{os.path.basename(logfile)}.svg")
 
 plot_loss_curve("logs/finetune_T5_MLM_entity_427082.txt", "T5 Finetuning - MLM Entity Masking")
 plot_loss_curve("logs/finetune_T5_MLM_label_427081.txt", "T5 Finetuning - MLM Label Masking")
diff --git a/src/models/T5_MLM_entity.py b/src/models/T5_MLM_entity.py
index 7c269a8aec2a5af7f8491c4a398834e09872b001..a39fe47fd79f032bd929f8a2cb588250a9b90c08 100644
--- a/src/models/T5_MLM_entity.py
+++ b/src/models/T5_MLM_entity.py
@@ -7,10 +7,25 @@ from datasets import Dataset, DatasetDict
 
 model_name = "google-t5/t5-base"
 
-print("Loading model: T5 MLM entity")
-tokenizer = T5Tokenizer.from_pretrained(model_name)
-model = T5ForConditionalGeneration.from_pretrained(model_name)
-print("Finished loading model: T5 MLM entity")
+def load_base():
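+    # Load the pretrained "google-t5/t5-base" tokenizer and model into module-level globals.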
+    global model
+    global tokenizer
+    print("Loading model: T5 MLM entity")
+    tokenizer = T5Tokenizer.from_pretrained(model_name)
+    model = T5ForConditionalGeneration.from_pretrained(model_name)
+    print("Finished loading model: T5 MLM entity")
+
+
+def load_finetuned(input_dir):
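+    # Load a finetuned tokenizer and model from input_dir into the same module-level globals.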
+    global model
+    global tokenizer
+    print(f"Loading model: T5 MLM entity finetuned ({input_dir})")
+    tokenizer = T5Tokenizer.from_pretrained(input_dir)
+    model = T5ForConditionalGeneration.from_pretrained(input_dir)
+    print(f"Finished loading model: T5 MLM entity finetuned")
+
 
 def set_label_dict(label_dict):
     global label_representatives
@@ -113,3 +128,6 @@ def finetune_model(sentences, entities, labels, output_dir, epochs=10):
 
     model.save_pretrained(output_dir)
     tokenizer.save_pretrained(output_dir)
+
+load_base()
+# load_finetuned("./src/models/t5_mlm_entity_finetuned_model/checkpoints/checkpoint-12200")
diff --git a/src/models/T5_MLM_label.py b/src/models/T5_MLM_label.py
index 983a59b44e5a677e0d4b35f8e114e77ccd3c2255..42a8dfc9176af96d9b12ffdb8ff1785417eebc22 100644
--- a/src/models/T5_MLM_label.py
+++ b/src/models/T5_MLM_label.py
@@ -6,10 +6,25 @@ from datasets import Dataset, DatasetDict
 
 model_name = "google-t5/t5-base"
 
-print("Loading model: T5 MLM label")
-tokenizer = T5Tokenizer.from_pretrained(model_name)
-model = T5ForConditionalGeneration.from_pretrained(model_name)
-print("Finished loading model: T5 MLM label")
+def load_base():
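+    # Load the pretrained "google-t5/t5-base" tokenizer and model into module-level globals.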
+    global model
+    global tokenizer
+    print("Loading model: T5 MLM label")
+    tokenizer = T5Tokenizer.from_pretrained(model_name)
+    model = T5ForConditionalGeneration.from_pretrained(model_name)
+    print("Finished loading model: T5 MLM label")
+
+
+def load_finetuned(input_dir):
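+    # Load a finetuned tokenizer and model from input_dir into the same module-level globals.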
+    global model
+    global tokenizer
+    print(f"Loading model: T5 MLM label finetuned ({input_dir})")
+    tokenizer = T5Tokenizer.from_pretrained(input_dir)
+    model = T5ForConditionalGeneration.from_pretrained(input_dir)
+    print(f"Finished loading model: T5 MLM label finetuned")
+
 
 def classify_entity(sentence, entity, labels):
     sentence_with_masked_hypothesis = f"{sentence} {entity} is a <extra_id_0>."
@@ -89,3 +104,6 @@ def finetune_model(sentences, entities, labels, output_dir, epochs=10):
 
     model.save_pretrained(output_dir)
     tokenizer.save_pretrained(output_dir)
+
+load_base()
+# load_finetuned("./src/models/t5_mlm_label_finetuned_model/checkpoints/checkpoint-9638")
diff --git a/src/models/T5_NLI.py b/src/models/T5_NLI.py
index 342950905d304cfb9f0c77dfe125980ce3bf65b3..f08a7372c728811a8587d727f26f6f3dcd117b10 100644
--- a/src/models/T5_NLI.py
+++ b/src/models/T5_NLI.py
@@ -139,4 +139,4 @@ def finetune_model(premises, hypotheses, entailment, output_dir, epochs=10):
     tokenizer.save_pretrained(output_dir)
 
 load_base()
-# load_finetuned("./src/models/t5_nli_finetuned_model/pretrained_CoNLL_epoch20")
+# load_finetuned("./src/models/t5_nli_finetuned_model/checkpoints/checkpoint-85500")