Commit 11c63b87 authored by weber

Merge remote-tracking branch 'origin/master'

parents 244b64c4 95fcf1c4
"""
@requirements: cora.py
@info: Will be imported from node_classification.
"""
import cora
import pickle as pkl
import random
@@ -21,7 +27,7 @@ def get_random_num_nodes(set_elm, num, seed):
    random.seed(seed)
    return set(random.sample(set_elm, num))
-def get_num_random_nodes_for_all_classes_read(path = "/home/utaemon/SP/graph.pkl", num = 20, seed = 1):
+def get_num_random_nodes_for_all_classes_read(path = "graph.pkl", num = 20, seed = 1):
    """get specific number of nodes per class, same number for all classes"""
    cora_dict = dict_of_node_classes_read(path)
    sampled_random_id_set = set()
@@ -29,17 +35,3 @@ def get_num_random_nodes_for_all_classes_read(path = "/home/utaemon/SP/graph.pkl", num = 20, seed = 1):
        for id in get_random_num_nodes(cora_dict[key], num, seed):
            sampled_random_id_set.add(id)
    return sampled_random_id_set
-#unused
-'''
-def get_num_of_random_nodes(path = "/home/utaemon/SP/graph.pkl", seed=0, num = 3):
-    """Get random nodes."""
-    random.seed(seed)
-    cora_nodes = set(read_graph(path).nodes)
-    return set(random.sample(cora_nodes, num))
-def pickle_output(method = get_num_random_nodes_for_all_classes_read(), output = "random_nodes.pkl"):
-    with open(output, "wb") as file:
-        pkl.dump(method, file)
-'''
\ No newline at end of file
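The two helpers above draw a fixed number of node ids per Cora class from the pickled graph. Below is a minimal, self-contained usage sketch; the per-class dictionary is a hypothetical stand-in for what dict_of_node_classes_read(path) returns (a mapping from class label to a set of node ids), and sorting before sampling only keeps the sketch portable, since random.sample on a set is deprecated since Python 3.9 and rejected in 3.11.

import random

def get_random_num_nodes(set_elm, num, seed):
    # Reproducibly draw `num` node ids from a collection (mirrors the helper above).
    random.seed(seed)
    return set(random.sample(sorted(set_elm), num))

# Hypothetical per-class dictionary, standing in for dict_of_node_classes_read(path).
cora_dict = {
    "Neural_Networks": {1, 2, 3, 4, 5},
    "Theory": {6, 7, 8, 9, 10},
}

sampled_random_id_set = set()
for key in cora_dict:
    for id in get_random_num_nodes(cora_dict[key], num=2, seed=1):
        sampled_random_id_set.add(id)
print(sampled_random_id_set)  # two ids per class, reproducible via the fixed seed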
File deleted
File deleted
C=0.1
max
0.652
avg
0.5900960000000008
[0.561, 0.574, 0.566, 0.595, 0.594, 0.578, 0.596, 0.578, 0.566, 0.54, 0.582, 0.591, 0.559, 0.581, 0.555, 0.618, 0.615, 0.6, 0.579, 0.572, 0.554, 0.599, 0.626, 0.614, 0.566, 0.585, 0.566, 0.597, 0.612, 0.613, 0.566, 0.579, 0.611, 0.562, 0.627, 0.624, 0.618, 0.599, 0.61, 0.612, 0.613, 0.551, 0.627, 0.581, 0.613, 0.587, 0.593, 0.574, 0.593, 0.572, 0.616, 0.606, 0.59, 0.576, 0.615, 0.589, 0.605, 0.572, 0.558, 0.553, 0.613, 0.597, 0.614, 0.575, 0.593, 0.621, 0.587, 0.55, 0.595, 0.597, 0.56, 0.599, 0.563, 0.6, 0.569, 0.584, 0.581, 0.574, 0.572, 0.618, 0.635, 0.591, 0.583, 0.549, 0.571, 0.593, 0.561, 0.594, 0.58, 0.597, 0.607, 0.6, 0.61, 0.571, 0.584, 0.61, 0.579, 0.583, 0.616, 0.608, 0.578, 0.584, 0.613, 0.617, 0.583, 0.557, 0.581, 0.595, 0.564, 0.562, 0.621, 0.613, 0.54, 0.6, 0.592, 0.613, 0.625, 0.577, 0.609, 0.606, 0.571, 0.611, 0.559, 0.62, 0.588, 0.604, 0.614, 0.565, 0.617, 0.604, 0.601, 0.62, 0.565, 0.585, 0.642, 0.553, 0.574, 0.599, 0.586, 0.599, 0.582, 0.627, 0.569, 0.608, 0.634, 0.618, 0.604, 0.565, 0.583, 0.564, 0.595, 0.587, 0.564, 0.564, 0.589, 0.589, 0.582, 0.594, 0.624, 0.595, 0.586, 0.629, 0.561, 0.568, 0.563, 0.6, 0.576, 0.575, 0.627, 0.591, 0.628, 0.636, 0.579, 0.573, 0.599, 0.633, 0.589, 0.578, 0.568, 0.623, 0.559, 0.585, 0.567, 0.622, 0.632, 0.617, 0.615, 0.592, 0.618, 0.596, 0.596, 0.588, 0.578, 0.564, 0.571, 0.572, 0.652, 0.55, 0.571, 0.638, 0.583, 0.558, 0.567, 0.608, 0.597, 0.538, 0.579, 0.601, 0.603, 0.593, 0.603, 0.598, 0.572, 0.604, 0.578, 0.595, 0.628, 0.603, 0.603, 0.588, 0.598, 0.557, 0.59, 0.586, 0.568, 0.582, 0.608, 0.589, 0.593, 0.59, 0.582, 0.576, 0.603, 0.577, 0.606, 0.579, 0.609, 0.615, 0.591, 0.559, 0.549, 0.591, 0.614, 0.607, 0.576, 0.586, 0.617, 0.594, 0.57, 0.606, 0.593, 0.553, 0.592, 0.581, 0.58, 0.613, 0.609, 0.579, 0.582, 0.615, 0.619, 0.569, 0.579, 0.599, 0.591, 0.61, 0.592, 0.572, 0.626, 0.619, 0.573, 0.608, 0.615, 0.579, 0.583, 0.574, 0.604, 0.603, 0.602, 0.618, 0.551, 0.616, 0.597, 0.576, 0.587, 0.586, 0.593, 0.581, 0.584, 0.589, 0.574, 0.594, 0.58, 0.585, 0.598, 0.597, 0.581, 0.62, 0.587, 0.626, 0.617, 0.613, 0.579, 0.57, 0.616, 0.594, 0.58, 0.616, 0.631, 0.558, 0.589, 0.598, 0.587, 0.562, 0.572, 0.594, 0.59, 0.587, 0.583, 0.6, 0.609, 0.588, 0.601, 0.568, 0.6, 0.624, 0.604, 0.564, 0.569, 0.618, 0.639, 0.566, 0.624, 0.609, 0.556, 0.579, 0.62, 0.59, 0.591, 0.521, 0.607, 0.597, 0.557, 0.597, 0.619, 0.573, 0.574, 0.608, 0.537, 0.603, 0.601, 0.587, 0.607, 0.635, 0.573, 0.603, 0.572, 0.59, 0.572, 0.619, 0.617, 0.552, 0.586, 0.594, 0.584, 0.604, 0.621, 0.591, 0.605, 0.567, 0.606, 0.601, 0.57, 0.606, 0.552, 0.572, 0.584, 0.577, 0.587, 0.612, 0.542, 0.584, 0.581, 0.589, 0.596, 0.588, 0.619, 0.559, 0.575, 0.602, 0.607, 0.616, 0.576, 0.631, 0.623, 0.595, 0.597, 0.568, 0.574, 0.602, 0.568, 0.613, 0.608, 0.599, 0.595, 0.632, 0.588, 0.589, 0.585, 0.596, 0.598, 0.634, 0.582, 0.613, 0.609, 0.632, 0.588, 0.597, 0.553, 0.56, 0.575, 0.567, 0.609, 0.531, 0.584, 0.579, 0.584, 0.568, 0.601, 0.601, 0.603, 0.573, 0.594, 0.564, 0.608, 0.593, 0.584, 0.581, 0.556, 0.584, 0.587, 0.585, 0.586, 0.625, 0.603, 0.615, 0.631, 0.611, 0.611, 0.591, 0.585, 0.559, 0.58, 0.573, 0.614, 0.612, 0.6, 0.605, 0.557, 0.575, 0.574, 0.576, 0.62, 0.624, 0.609, 0.571, 0.592, 0.58, 0.596, 0.613, 0.581, 0.595, 0.58, 0.587, 0.576, 0.602, 0.58, 0.585, 0.604, 0.623, 0.575, 0.603, 0.565, 0.596, 0.563, 0.604, 0.608, 0.593, 0.578, 0.603, 0.625, 0.598, 0.583, 0.605, 0.581, 0.551, 0.59, 0.556, 0.59, 0.597, 0.605, 0.585, 0.598, 0.618, 0.589, 0.576, 0.601, 0.591, 0.617, 0.565, 0.57, 0.568, 0.593, 0.591, 0.566, 
0.581, 0.604, 0.61, 0.594, 0.595, 0.586, 0.607, 0.552, 0.614, 0.604, 0.619, 0.57, 0.564, 0.612, 0.639, 0.601, 0.626, 0.602, 0.564, 0.588, 0.592, 0.577, 0.56, 0.585, 0.579, 0.578, 0.583, 0.561, 0.609, 0.549, 0.598, 0.625, 0.607, 0.611, 0.587, 0.61, 0.614, 0.572, 0.602, 0.594, 0.601, 0.617, 0.579, 0.602, 0.583, 0.589, 0.608, 0.559, 0.604, 0.618, 0.611, 0.588, 0.614, 0.594, 0.601, 0.58, 0.567, 0.603, 0.531, 0.579, 0.646, 0.573, 0.598, 0.565, 0.58, 0.596, 0.587, 0.553, 0.586, 0.589, 0.599, 0.58, 0.6, 0.595, 0.563, 0.611, 0.618, 0.575, 0.586, 0.588, 0.588, 0.598, 0.563, 0.564, 0.594, 0.637, 0.58, 0.568, 0.6, 0.605, 0.609, 0.569, 0.589, 0.607, 0.588, 0.6, 0.602, 0.615, 0.577, 0.583, 0.613, 0.622, 0.576, 0.576, 0.616, 0.596, 0.57, 0.609, 0.555, 0.614, 0.601, 0.578, 0.569, 0.625, 0.576, 0.601, 0.57, 0.591, 0.569, 0.575, 0.593, 0.62, 0.557, 0.578, 0.559, 0.55, 0.586, 0.598, 0.565, 0.596, 0.577, 0.593, 0.557, 0.598, 0.562, 0.621, 0.606, 0.556, 0.593, 0.591, 0.577, 0.544, 0.588, 0.57, 0.585, 0.575, 0.596, 0.619, 0.567, 0.585, 0.58, 0.593, 0.559, 0.576, 0.587, 0.613, 0.594, 0.624, 0.603, 0.549, 0.585, 0.543, 0.585, 0.586, 0.596, 0.584, 0.578, 0.571, 0.601, 0.59, 0.557, 0.583, 0.584, 0.569, 0.575, 0.569, 0.595, 0.589, 0.597, 0.592, 0.556, 0.57, 0.558, 0.588, 0.635, 0.604, 0.634, 0.59, 0.62, 0.566, 0.58, 0.636, 0.565, 0.545, 0.617, 0.599, 0.611, 0.605, 0.562, 0.593, 0.571, 0.588, 0.627, 0.557, 0.549, 0.583, 0.615, 0.596, 0.62, 0.554, 0.604, 0.598, 0.589, 0.598, 0.614, 0.547, 0.61, 0.547, 0.582, 0.588, 0.578, 0.597, 0.613, 0.581, 0.549, 0.579, 0.553, 0.543, 0.584, 0.632, 0.568, 0.592, 0.612, 0.58, 0.567, 0.61, 0.562, 0.574, 0.551, 0.589, 0.603, 0.621, 0.576, 0.63, 0.613, 0.599, 0.599, 0.602, 0.578, 0.546, 0.596, 0.583, 0.622, 0.641, 0.575, 0.547, 0.589, 0.569, 0.601, 0.602, 0.567, 0.628, 0.584, 0.58, 0.567, 0.578, 0.599, 0.586, 0.58, 0.636, 0.593, 0.63, 0.582, 0.589, 0.605, 0.519, 0.585, 0.588, 0.595, 0.605, 0.585, 0.606, 0.587, 0.581, 0.579, 0.608, 0.54, 0.621, 0.618, 0.599, 0.614, 0.617, 0.609, 0.564, 0.639, 0.57, 0.577, 0.573, 0.61, 0.599, 0.55, 0.586, 0.52, 0.576, 0.617, 0.587, 0.61, 0.591, 0.597, 0.561, 0.558, 0.586, 0.596, 0.551, 0.509, 0.614, 0.587, 0.573, 0.616, 0.608, 0.57, 0.606, 0.601, 0.585, 0.587, 0.594, 0.528, 0.57, 0.625, 0.607, 0.603, 0.571, 0.62, 0.595, 0.593, 0.614, 0.604, 0.566, 0.592, 0.574, 0.594, 0.592, 0.604, 0.602, 0.603, 0.582, 0.615, 0.588, 0.589, 0.625, 0.603, 0.574, 0.574, 0.595, 0.588, 0.592, 0.565, 0.604, 0.562, 0.589, 0.565, 0.559, 0.572, 0.581, 0.557, 0.613, 0.584, 0.57, 0.592, 0.581, 0.589, 0.597, 0.577, 0.637, 0.59, 0.599, 0.568, 0.612, 0.634, 0.586, 0.586, 0.609, 0.581, 0.608, 0.58, 0.565, 0.595, 0.575, 0.584, 0.563, 0.56, 0.624, 0.589, 0.621, 0.597, 0.575, 0.57, 0.597, 0.566, 0.582, 0.575, 0.584, 0.618, 0.586, 0.63, 0.584, 0.613, 0.649, 0.603, 0.575, 0.574, 0.575, 0.577, 0.605, 0.608, 0.622, 0.582, 0.58, 0.573, 0.581, 0.584, 0.542, 0.572, 0.589, 0.577, 0.592, 0.577, 0.584, 0.6, 0.581, 0.628, 0.616, 0.564, 0.578, 0.574, 0.602, 0.587, 0.602, 0.596, 0.602, 0.57, 0.606, 0.572, 0.595, 0.554, 0.578, 0.591, 0.549, 0.598, 0.613, 0.545, 0.573, 0.635, 0.587, 0.611, 0.589, 0.598, 0.611, 0.611, 0.576, 0.564, 0.575, 0.566, 0.601, 0.642, 0.584, 0.616, 0.565, 0.608, 0.606, 0.585, 0.587, 0.592, 0.612, 0.583, 0.593, 0.612, 0.6, 0.583, 0.629, 0.616, 0.594, 0.598, 0.58, 0.572]
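The max and avg figures above follow directly from the run list; a trivial sketch of the summary, with a placeholder list standing in for the full set of runs:

accuracies = [0.561, 0.574, 0.566, 0.595]  # placeholder; the full list of runs is above
print("max", max(accuracies))
print("avg", sum(accuracies) / len(accuracies))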
Determination of C
/usr/bin/python3 /home/utaemon/PycharmProjects/Softwareprojekt/node_classification.py

Results (Iteration/Random Seed x C):

Seed | C=0.01 | C=0.1 | C=0.5 | C=1.0 | C=5.0 | C=10.0
-----|--------|-------|-------|-------|-------|-------
  0  | 0.558  | 0.564 | 0.564 | 0.561 | 0.542 | 0.544
  1  | 0.642  | 0.635 | 0.628 | 0.619 | 0.611 | 0.61
  2  | 0.549  | 0.57  | 0.553 | 0.548 | 0.539 | 0.538
  3  | 0.629  | 0.624 | 0.608 | 0.603 | 0.578 | 0.575
  4  | 0.62   | 0.638 | 0.632 | 0.632 | 0.629 | 0.631
  5  | 0.595  | 0.615 | 0.62  | 0.622 | 0.616 | 0.61
  6  | 0.607  | 0.626 | 0.618 | 0.61  | 0.598 | 0.589
  7  | 0.596  | 0.603 | 0.587 | 0.581 | 0.563 | 0.561
  8  | 0.602  | 0.619 | 0.617 | 0.611 | 0.608 | 0.605
  9  | 0.58   | 0.595 | 0.585 | 0.577 | 0.563 | 0.56

Process finished with exit code 0
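For reference, a sketch of the kind of sweep that produces the table above. The real node_classification.py derives its features and train/test split from the Cora data, which is not reproduced here; the placeholder features, the 7-class labels, and the training size of 140 nodes (20 per class, matching the sampler's default num = 20) are assumptions.

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
X = rng.normal(size=(500, 16))      # placeholder node features
y = rng.integers(0, 7, size=500)    # placeholder labels for the 7 Cora classes

for seed in range(10):              # "Iteration/Random Seed: 0..9"
    X_tr, X_te, y_tr, y_te = train_test_split(
        X, y, train_size=140, random_state=seed, stratify=y)
    for C in (0.01, 0.1, 0.5, 1.0, 5.0, 10.0):
        clf = LogisticRegression(C=C, max_iter=1000).fit(X_tr, y_tr)
        print(f"seed={seed} C={C} acc={clf.score(X_te, y_te):.3f}")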
*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*
*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*
*x* *x*
*x* Copyright (C) 1995 University of Pennsylvania *x*
*x* *x*
*x* The data in this file are part of a preliminary version of the *x*
*x* Penn Treebank Corpus and should not be redistributed. Any *x*
*x* research using this corpus or based on it should acknowledge *x*
*x* that fact, as well as the preliminary nature of the corpus. *x*
*x* *x*
*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*
*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*x*
( (S
  (NP-SBJ (DT That) )
......
@@ -29,6 +29,9 @@ def get_stopword_list(stop_path):
    stopli = []
    for line in f:
        stopli.append(line[:-1])
+    stopli.append("n't")
+    stopli.append("'m")
+    stopli.append("whether")
    return stopli
def get_infos(tree_path):
......
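A reconstruction sketch of the changed helper, under the assumption that stop_path points to a plain-text file with one stopword per line; the open() wrapper is assumed, since the hunk starts inside the loop over f.

def get_stopword_list(stop_path):
    stopli = []
    with open(stop_path, encoding="utf-8") as f:  # assumed; not shown in the hunk
        for line in f:
            stopli.append(line[:-1])  # drop the trailing newline, as in the original
    # Tokens added by this commit: contracted forms and "whether".
    stopli.append("n't")
    stopli.append("'m")
    stopli.append("whether")
    return stopli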