From bc8008b60ace43c30b8e0eeab1e3738cc1d733f6 Mon Sep 17 00:00:00 2001
From: friebolin <friebolin@cl.uni-heidelberg.de>
Date: Fri, 24 Feb 2023 16:04:48 +0100
Subject: [PATCH] Change print

---
 inference.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/inference.py b/inference.py
index 7117c11..b85f07b 100644
--- a/inference.py
+++ b/inference.py
@@ -99,8 +99,8 @@ for batch in train_dataloader:
     prediction = torch.argmax(outputs[0])
     print(prediction)
     if prediction == 1:
-        print("metonymy")
+        print("Metonymy! The target word has metonymic meaning in this context.")
     elif prediction == 0:
-        print("literal")
+        print("Literal! The target word has literal meaning in this context.")
--
GitLab
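
For context, a minimal sketch of the inference loop this hunk sits in, assuming a binary
classifier over metonymic vs. literal readings. `model`, `torch.no_grad()`, and the batch
unpacking are assumptions for illustration; only `train_dataloader`, `outputs`, the argmax
call, and the printed labels come from the patch itself.

    import torch

    model.eval()  # assumed: a fine-tuned binary classifier (0 = literal, 1 = metonymic)
    with torch.no_grad():
        for batch in train_dataloader:
            # assumed: each batch is a dict of tensors the model accepts as keyword arguments
            outputs = model(**batch)
            # assumed: outputs[0] holds the logits; argmax picks the predicted class
            prediction = torch.argmax(outputs[0])
            print(prediction)
            if prediction == 1:
                print("Metonymy! The target word has metonymic meaning in this context.")
            elif prediction == 0:
                print("Literal! The target word has literal meaning in this context.")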