Commit 54c8a12d authored by Alexander Lercher

Extracted drawing method; example unit tests

parent 4a9cd21f
@@ -104,7 +104,6 @@ class SemanticLinking:
                 weight1.append(self.G.number_of_edges(labelvalues[row], labelvalues[row1]))
                 print("The number of coccurance from node ", labelvalues[row], "to node ", labelvalues[row1], ": ", weight1[row1])
-        #self.G.__setattr__('weight', weight1)
         self.G.weight = weight1
         return weight1
@@ -138,15 +137,11 @@ class SemanticLinking:
         self.G.color = node_adjacencies
         return node_adjacencies

-    def result(self, weight1, labeling, node_adjacencies):
+    def print_metrics(self, weight1, labeling, node_adjacencies):
         weigth = []
         edge_width = []
         plt.figure(figsize=(25, 25))
-        options = {
-            'with_labels': True,
-            'font_weight': 'regular',
-        }
         # colors = [color_map[G.node[node][1]] for node in G]
         # sizes = [G.node[node]['Timestamp'] * 10 for node in G]
@@ -159,14 +154,14 @@ class SemanticLinking:
             edge_width.append([])
             for drow in range(len(weigth[row])):
                 edge_width[row].append(weigth[row][drow])

-        node_size = [v * 80 for v in d.values()]  # setting node size based on node centrality
         edge_width = [row * 0.5 for row in weight1]

         print("Nodes' Degree: ", nx.degree(self.G))
         print("Nodes' Betweeness ", nx.edge_betweenness_centrality(self.G))
         print("Nodes' Betweeness-centrality: ", nx.betweenness_centrality(self.G))

-        # TODO extract method
+    def draw_edges(self, weight1, labeling, node_adjacencies):
         """
         Using the spring layout :
         - k controls the distance between the nodes and varies between 0 and 1
@@ -175,6 +170,13 @@ class SemanticLinking:
         """
         labels2 = {}
+        options = {
+            'with_labels': True,
+            'font_weight': 'regular',
+        }
+        d = nx.degree_centrality(self.G)
+        node_size = [v * 80 for v in d.values()]  # setting node size based on node centrality

         for idx, edge in enumerate(self.G.edges):
             labels2[edge] = "s"
@@ -203,9 +205,9 @@ class SemanticLinking:
         node_adjacencies = self.findigneighbors()

-        self.result(weight1, labeling, node_adjacencies)
+        self.print_metrics(weight1, labeling, node_adjacencies)
+        self.draw_edges(weight1, labeling, node_adjacencies)


 if __name__ == '__main__':
     linking = SemanticLinking()
...
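For readers following the refactoring, a rough sketch of what the extracted drawing step could look like once the layout options, centrality-based node sizes and weight-based edge widths live in draw_edges. This is a hypothetical illustration, not the committed code: the name draw_edges_sketch, the k/iterations values passed to the spring layout, and the plt.show() call are all assumptions.

# Hypothetical sketch only -- not the committed implementation.
import networkx as nx
import matplotlib.pyplot as plt

def draw_edges_sketch(G, weight1, node_adjacencies):
    # Drawing options moved out of the old result() method.
    options = {'with_labels': True, 'font_weight': 'regular'}
    # Node size scaled by degree centrality, edge width by co-occurrence weight.
    d = nx.degree_centrality(G)
    node_size = [v * 80 for v in d.values()]
    edge_width = [w * 0.5 for w in weight1]
    # Spring layout: k (between 0 and 1) controls node spacing,
    # iterations the number of layout steps (values assumed here).
    pos = nx.spring_layout(G, k=0.25, iterations=20)
    nx.draw(G, pos, node_color=node_adjacencies,
            node_size=node_size, width=edge_width, **options)
    plt.show()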
# add modules folder to interpreter path
import sys
import os

modules_paths = ['../app/', '../../../modules/']
for path in modules_paths:
    if os.path.exists(path):
        sys.path.insert(1, path)
        print(f"added {path}")

import unittest
import manage_sys_paths
import json

from initialdemo.HyperGraph import HyperGraph


class Test_HyperGraph(unittest.TestCase):
    hypergraph: HyperGraph = None

    def setUp(self):
        self.hypergraph = HyperGraph()

    def test_removeDuplicates_noDupOrdered_sameContent(self):
        list_ = [[1,2,3]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual(list_, set_)

    def test_removeDuplicates_oneDupOrdered_removed(self):
        list_ = [[1,2,3,3]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual([[1,2,3]], set_)

    def test_removeDuplicates_multDupOrdered_allRemoved(self):
        list_ = [[1,1,2,3,3,4]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual([[1,2,3,4]], set_)

    def test_removeDuplicates_noDupUnordered_sameContent(self):
        list_ = [[1,2,3,5,9,4,30,15]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual(list_, set_)

    def test_removeDuplicates_oneDupUnordered_removed(self):
        list_ = [[1,2,3,5,9,4,30,5,15]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual([[1,2,3,5,9,4,30,15]], set_)

    def test_removeDuplicates_multDupUnordered_allRemoved(self):
        list_ = [[1,2,5,3,1,70,25,-1,7,-1]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual([[1,2,5,3,70,25,-1,7]], set_)

    def test_removeDuplicates_oneDupOrderedMultDim_removed(self):
        list_ = [[1,1,2],[2,2,3]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual([[1,2],[2,3]], set_)

    def test_removeDuplicates_multDupOrderedMultDim_allRemoved(self):
        list_ = [[1,1,2,3,3],[2,2,3,4,4,5]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual([[1,2,3],[2,3,4,5]], set_)

    def test_removeDuplicates_multDupUnorderedMultDim_allRemoved(self):
        list_ = [[1,2,5,2,7,3],[-10,5,3,20,-10,-7]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual([[1,2,5,7,3],[-10,5,3,20,-7]], set_)

    def test_removeDuplicates_multDupUnorderedMultDim2_allRemoved(self):
        list_ = [[1,2,5,2,7,3],[-10,5,3,20,-10,-7],[1,2]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual([[1,2,5,7,3],[-10,5,3,20,-7],[1,2]], set_)

    def test_removeDuplicates_multDupUnorderedTripleDim_noDupRemoved(self):
        list_ = [[[1,2,5,2,7,3],[-10,5,3,20,-10,-7],[1,2]]]
        set_ = self.hypergraph.remove_duplicates(list_)
        self.assertEqual(list_, set_)


if __name__ == '__main__':
    unittest.main()
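As a reading aid, the contract these tests pin down can be summarised in a minimal sketch of remove_duplicates: drop repeated values inside each inner list while preserving first-occurrence order, and leave deeper nesting untouched. This sketch only illustrates the behaviour the tests imply; the actual HyperGraph implementation may differ.

def remove_duplicates(lists):
    # Illustrative sketch (assumed behaviour, not the repository code):
    # remove repeated values inside each inner list, keeping first-occurrence order.
    result = []
    for inner in lists:
        seen = []
        for item in inner:
            if item not in seen:  # list membership also handles unhashable nested lists
                seen.append(item)
        result.append(seen)
    return result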