Commit 0ba62690 authored by Manuel

Merge remote-tracking branch 'origin/staging' into bugfix/cors

parents 9921dd27 e7b081ad
......@@ -4,7 +4,6 @@ import importlib.util
import pathlib
import shutil
import re
'''
This script searches for all 'tests/' directories and executes all tests
by cd'ing into the dir and executing unittest discover.
......@@ -12,16 +11,21 @@ It additionally installs all dependencies from a '../requirements.txt' via pip.
Use command line argument '-w' to run on windows.
'''
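# Condensed sketch (comments only) of the discovery loop this script implements;
# the actual code below additionally creates a venv, installs requirements and
# collects coverage:
#
#   for (dirname, dirs, files) in os.walk(ROOT):
#       if dirname.endswith(TESTS_FOLDER_NAME):
#           os.chdir(dirname)
#           os.system(f"{PY} -m unittest discover")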
PY = sys.argv[2] if (len(sys.argv) > 1 and sys.argv[1] == '-py') else 'python' # use -py to use your own python command
PY = sys.argv[2] if (len(sys.argv) > 1 and sys.argv[1] == '-py') else 'python3.7' # use -py to use your own python command
ROOT = pathlib.Path(__file__).parent.parent.absolute()
REPORTS = ROOT / 'reports'
TESTS_FOLDER_NAME = os.path.normpath("/tests")
print("Creating VENV")
os.system(f"{PY} -m venv venv")
PY = f"~/smart/venv/bin/{PY}"
print("\nSearching for tests at the path: "+ str(ROOT))
count = 0
resultCodeList = []
coverage_paths_set = set()
microservice_coverage_paths_set = set()
for (dirname, dirs, files) in os.walk(ROOT):
#I assume all the tests are placed in a folder named "tests"
......@@ -37,8 +41,8 @@ for (dirname, dirs, files) in os.walk(ROOT):
exit_val = os.system(f"{PY} -m pip install -r ../requirements.txt") # install pip dependencies
#resultCodeList.append(exit_val)
#exit_val = os.system(f"{PY} -m unittest discover") # execute the tests
exit_val = os.system(f"python3.7 -m coverage run --append --omit=*/site-packages*,*/dist-packages* -m unittest discover") #TEST CODE COVERAGE
coverage_paths_set.add(os.path.normpath(dirname))
exit_val = os.system(f"{PY} -m coverage run --append --omit=*/site-packages*,*/dist-packages* -m unittest discover") #TEST CODE COVERAGE
microservice_coverage_paths_set.add(os.path.normpath(dirname))
resultCodeList.append(exit_val) # once per folder, i.e. if 3 tests are in a folder and crash, there will be just one exit value
except Exception as e:
......@@ -56,8 +60,8 @@ for (dirname, dirs, files) in os.walk(ROOT):
#cur_dir = os.path(dirname).parent()
os.chdir(cur_dir)
print(f"Executing coverage test in {cur_dir}")
exit_val = os.system(f"python3.7 -m coverage run --append --omit=*/site-packages* -m unittest discover")
coverage_paths_set.add(os.path.normpath(cur_dir))
exit_val = os.system(f"{PY} -m coverage run --append --omit=*/site-packages* -m unittest discover")
microservice_coverage_paths_set.add(os.path.normpath(cur_dir))
except Exception as e:
print(e)
......@@ -83,7 +87,7 @@ except Exception as e:
print("Combinging coverages")
counter = 0
for path in coverage_paths_set:
for path in microservice_coverage_paths_set:
try:
path += '/.coverage'
original = os.path.normpath( path )
......@@ -96,11 +100,20 @@ for path in coverage_paths_set:
except Exception as e:
print(e)
continue
try:
coverage_xml_path = os.path.normpath( str(REPORTS) + '/coverage.xml')
os.remove(coverage_xml_path)
#coverage_html_path = os.path.normpath( str(REPORTS) + '/htmlcov' )
#os.rmdir(coverage_html_path)
except Exception as e:
print(e)
print("Generating Combined report")
os.system("python3.7 -m coverage combine")
os.system("python3.7 -m coverage xml")
os.system("python3.7 -m coverage html") #if you want to generate the html as well
os.system(f"{PY} -m coverage combine")
os.system(f"{PY} -m coverage xml")
os.system(f"{PY} -m coverage html") #if you want to generate the html as well
firstError = -1
i = 0
......
......@@ -24,6 +24,7 @@ This token is used for authentication as _regular user_ on all microservices cur
```
{
"ApplicationType": "use-case identifier as string",
"docType": "use-case-table identifier as string",
"key": "value",
...
}
......@@ -91,4 +92,4 @@ Returns the computed similarity. Two clusters belonging to the SAME layer will b
## Connected Cluster
Intermediary data-structure used only by the function which computes the similarity. Clusters are connected only to other clusters belonging to a DIFFERENT layer.
```GET https://articonf1.itec.aau.at:30103/api/use_cases/{use_case}/tables{table}/connectedClusters``` returns all connected clusters for the given use-case and table.
```GET https://articonf1.itec.aau.at:30103/api/use_cases/{use_case}/tables/{table}/connectedClusters``` returns all connected clusters for the given use-case and table.
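A hedged example of calling this endpoint from Python (the bearer token is a placeholder, and `verify=False` mirrors the self-signed-certificate handling used elsewhere in this repository):

```python
import requests

# placeholder identifiers for illustration only
use_case, table = "vialog-new-enum", "video"
url = (f"https://articonf1.itec.aau.at:30103/api/use_cases/"
       f"{use_case}/tables/{table}/connectedClusters")

response = requests.get(
    url,
    headers={"Authorization": "Bearer <JWT>"},  # token from the REST gateway
    verify=False,  # self-signed certificate
)
print(response.json())
```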
......@@ -3,7 +3,9 @@ import sys
for path in ['../', './']:
sys.path.insert(1, path)
#####################################
### Don't include for test report ###
#####################################
try:
class TestCoverage(unittest.TestCase):
def test_init_main(self):
......
try:
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
# python -m unittest discover
from db.entities import Cluster
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
from datetime import date, datetime
import json
# python -m unittest discover
from db.entities import Cluster
from datetime import date, datetime
import json
class TestCluster(unittest.TestCase):
def test_init_Cluster(self):
try:
c = Cluster('debug', 'debug-table1', 'layer1', 1, [1, 2, 3])
self.assertEqual('debug', c.use_case)
self.assertEqual('debug-table1', c.use_case_table)
self.assertEqual(1, c.cluster_label)
self.assertEqual([1, 2, 3], c.nodes)
except Exception as e:
print(e)
class TestCluster(unittest.TestCase):
def test_init_Cluster(self):
c = Cluster('debug', 'debug-table1', 'layer1', 1, [1, 2, 3])
if __name__ == '__main__':
unittest.main()
except Exception as e:
print ("Exception found:")
print (e)
self.assertEqual('debug', c.use_case)
self.assertEqual('debug-table1', c.use_case_table)
self.assertEqual(1, c.cluster_label)
self.assertEqual([1, 2, 3], c.nodes)
if __name__ == '__main__':
unittest.main()
try:
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
# python -m unittest discover
from processing.clustering import ClusterResultConverter, ClusterResult
from typing import List, Dict, Any
class TestClusterResult(unittest.TestCase):
converter:ClusterResultConverter = None
def setUp(self):
try:
self.converter = ClusterResultConverter()
except Exception as e:
print (e)
def test_result_undefined_feature(self):
try:
cluster_groups = self._get_some_cluster_groups_1d()
cluster_res = self.converter.convert_to_cluster_results(
cluster_groups=cluster_groups,
features=[]
)
self.assert_correct_cluster_result_len(cluster_groups, cluster_res)
self.assert_correct_cluster_result_labels(['n.a.','n.a.','n.a.'], cluster_res)
except Exception as e:
print (e)
def test_result_1d_feature(self):
try:
cluster_groups = self._get_some_cluster_groups_1d()
cluster_res = self.converter.convert_to_cluster_results(
cluster_groups=cluster_groups,
features=['v']
)
self.assert_correct_cluster_result_len(cluster_groups, cluster_res)
self.assert_correct_cluster_result_labels(['-1.0 -- 1.0','10.0 -- 11.0','2.0 -- 2.0'], cluster_res)
except Exception as e:
print (e)
def test_result_2d_features(self):
try:
cluster_groups = self._get_some_cluster_groups_2d()
cluster_res = self.converter.convert_to_cluster_results(
cluster_groups=cluster_groups,
features=['v', 'u']
)
self.assert_correct_cluster_result_len(cluster_groups, cluster_res)
self.assert_correct_cluster_result_labels([str((0.0,0.0)), str((10.5,10.5)), str((2.0,2.0)), str((3.0,6.0))], cluster_res)
except Exception as e:
print (e)
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
# python -m unittest discover
from processing.clustering import ClusterResultConverter, ClusterResult
from typing import List, Dict, Any
class TestClusterResult(unittest.TestCase):
converter:ClusterResultConverter = None
def setUp(self):
self.converter = ClusterResultConverter()
def test_result_undefined_feature(self):
cluster_groups = self._get_some_cluster_groups_1d()
cluster_res = self.converter.convert_to_cluster_results(
cluster_groups=cluster_groups,
features=[]
)
self.assert_correct_cluster_result_len(cluster_groups, cluster_res)
self.assert_correct_cluster_result_labels(['n.a.','n.a.','n.a.'], cluster_res)
def test_result_1d_feature(self):
cluster_groups = self._get_some_cluster_groups_1d()
cluster_res = self.converter.convert_to_cluster_results(
cluster_groups=cluster_groups,
features=['v']
)
#region Custom Assertions
def assert_correct_cluster_result_len(self, expected: 'original dict of lists', actual: Dict[Any, ClusterResult]):
try:
self.assertEqual(len(expected), len(actual))
for i in range(len(expected)):
self.assertEqual(len(expected[i]), len(actual[i].nodes))
self.assertEqual(expected[i], actual[i].nodes)
except Exception as e:
print (e)
def assert_correct_cluster_result_labels(self, expected: List[str], actual: Dict[Any, ClusterResult]):
try:
self.assertEqual(len(expected), len(actual))
for i in range(len(expected)):
self.assertEqual(expected[i], actual[i].label)
except Exception as e:
print (e)
#endregion Custom Assertions
#region helper methods
def _get_some_cluster_groups_1d(self):
return {
0: [{'v':'0'}, {'v':'1'}, {'v':'-1'}],
1: [{'v':'10'}, {'v':'11'}],
2: [{'v':'2'}],
}
def _get_some_cluster_groups_2d(self):
return {
0: [{'v':'0', 'u':'0'}, {'v':'1', 'u':'1'}, {'v':'-1', 'u':'-1'}],
1: [{'v':'10', 'u':'10'}, {'v':'11', 'u':'11'}],
2: [{'v':'2', 'u':'2'}],
3: [{'v':'7', 'u':'7'}, {'v':'5', 'u':'3'}, {'v':'-3', 'u':'8'}],
}
#endregion helper methods
if __name__ == '__main__':
unittest.main()
except Exception as e:
print ("Exception found:")
print (e)
\ No newline at end of file
self.assert_correct_cluster_result_len(cluster_groups, cluster_res)
self.assert_correct_cluster_result_labels(['-1.0 -- 1.0','10.0 -- 11.0','2.0 -- 2.0'], cluster_res)
def test_result_2d_features(self):
cluster_groups = self._get_some_cluster_groups_2d()
cluster_res = self.converter.convert_to_cluster_results(
cluster_groups=cluster_groups,
features=['v', 'u']
)
self.assert_correct_cluster_result_len(cluster_groups, cluster_res)
self.assert_correct_cluster_result_labels([str((0.0,0.0)), str((10.5,10.5)), str((2.0,2.0)), str((3.0,6.0))], cluster_res)
#region Custom Assertions
def assert_correct_cluster_result_len(self, expected: 'original dict of lists', actual: Dict[Any, ClusterResult]):
self.assertEqual(len(expected), len(actual))
for i in range(len(expected)):
self.assertEqual(len(expected[i]), len(actual[i].nodes))
self.assertEqual(expected[i], actual[i].nodes)
def assert_correct_cluster_result_labels(self, expected: List[str], actual: Dict[Any, ClusterResult]):
self.assertEqual(len(expected), len(actual))
for i in range(len(expected)):
self.assertEqual(expected[i], actual[i].label)
#endregion Custom Assertions
#region helper methods
def _get_some_cluster_groups_1d(self):
return {
0: [{'v':'0'}, {'v':'1'}, {'v':'-1'}],
1: [{'v':'10'}, {'v':'11'}],
2: [{'v':'2'}],
}
def _get_some_cluster_groups_2d(self):
return {
0: [{'v':'0', 'u':'0'}, {'v':'1', 'u':'1'}, {'v':'-1', 'u':'-1'}],
1: [{'v':'10', 'u':'10'}, {'v':'11', 'u':'11'}],
2: [{'v':'2', 'u':'2'}],
3: [{'v':'7', 'u':'7'}, {'v':'5', 'u':'3'}, {'v':'-3', 'u':'8'}],
}
#endregion helper methods
if __name__ == '__main__':
unittest.main()
\ No newline at end of file
try:
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
# python -m unittest discover
from processing.clustering import Clusterer, ClusterResult
import numpy as np
from typing import List, Dict, Any
class TestClusterer(unittest.TestCase):
clusterer:Clusterer = None
# python -m unittest discover
from processing.clustering import Clusterer, ClusterResult
import numpy as np
from typing import List, Dict, Any
def setUp(self):
self.clusterer = Clusterer(min_points=2)
class TestClusterer(unittest.TestCase):
clusterer:Clusterer = None
#region _extract_features
def setUp(self):
self.clusterer = Clusterer(min_points=2)
def test_extract_features_emptyDataset_noResults(self):
features = self.clusterer._extract_features(dataset=[], features=['test'])
np.testing.assert_equal(np.asarray([]), features)
#region _extract_features
def test_extract_features_emptyFeatures_singleEmptyResult(self):
features = self.clusterer._extract_features(dataset=[{'a':1, 'b':2}], features=[])
np.testing.assert_equal(np.asarray([[]]), features)
def test_extract_features_emptyDataset_noResults(self):
features = self.clusterer._extract_features(dataset=[], features=['test'])
np.testing.assert_equal(np.asarray([]), features)
def test_extract_features_singleFeature_Projection(self):
features = self.clusterer._extract_features(dataset=[{'a':1, 'b':2}], features=['a'])
np.testing.assert_equal(np.asarray([[1]]), features)
def test_extract_features_emptyFeatures_singleEmptyResult(self):
features = self.clusterer._extract_features(dataset=[{'a':1, 'b':2}], features=[])
np.testing.assert_equal(np.asarray([[]]), features)
def test_extract_features_singleFeature_Projection_2(self):
features = self.clusterer._extract_features(dataset=[{'a':1, 'b':2}, {'a':3, 'b':4}], features=['a'])
np.testing.assert_equal(np.asarray([[1], [3]]), features)
def test_extract_features_multFeature_Projection(self):
features = self.clusterer._extract_features(dataset=[{'a':0, 'b':2, 'c':4}, {'a':1, 'b':3, 'c':5}], features=['a','c'])
np.testing.assert_equal(np.asarray([[0,4], [1,5]]), features)
def test_extract_features_singleFeature_Projection(self):
features = self.clusterer._extract_features(dataset=[{'a':1, 'b':2}], features=['a'])
np.testing.assert_equal(np.asarray([[1]]), features)
def test_extract_features_singleFeature_Projection_2(self):
features = self.clusterer._extract_features(dataset=[{'a':1, 'b':2}, {'a':3, 'b':4}], features=['a'])
np.testing.assert_equal(np.asarray([[1], [3]]), features)
#endregion _extract_features
def test_extract_features_multFeature_Projection(self):
features = self.clusterer._extract_features(dataset=[{'a':0, 'b':2, 'c':4}, {'a':1, 'b':3, 'c':5}], features=['a','c'])
np.testing.assert_equal(np.asarray([[0,4], [1,5]]), features)
#endregion _extract_features
#region create_labels
#region create_labels
def test_create_labels_noneInput_noneOutput(self):
labels = self.clusterer.create_labels(None)
self.assertEqual(None, labels)
def test_create_labels_noneInput_noneOutput(self):
labels = self.clusterer.create_labels(None)
self.assertEqual(None, labels)
def test_create_labels_emptyInput_emptyOutput(self):
labels = self.clusterer.create_labels([])
self.assertEqual([], labels)
def test_create_labels_singleInput_noise(self):
clusterer = Clusterer(min_points=1)
def test_create_labels_emptyInput_emptyOutput(self):
labels = self.clusterer.create_labels([])
self.assertEqual([], labels)
def test_create_labels_singleInput_noise(self):
clusterer = Clusterer(min_points=1)
features = clusterer._extract_features(dataset=[self.location(1,2)], features=self.get_location_features())
labels = clusterer.create_labels(features)
features = clusterer._extract_features(dataset=[self.location(1,2)], features=self.get_location_features())
labels = clusterer.create_labels(features)
self.assertEqual(1, len(labels))
self.assertEqual(-1, labels[0])
self.assertEqual(1, len(labels))
self.assertEqual(-1, labels[0])
def test_create_labels_tooSmallInputForMinPtsHyperparameter_error(self):
clusterer = Clusterer(min_points=3)
def test_create_labels_tooSmallInputForMinPtsHyperparameter_error(self):
clusterer = Clusterer(min_points=3)
features = clusterer._extract_features(dataset=[self.location(1,2), self.location(1,2)], features=self.get_location_features())
with self.assertRaises(ValueError):
# Fails because (min_pts > |input elements|)
clusterer.create_labels(features)
features = clusterer._extract_features(dataset=[self.location(1,2), self.location(1,2)], features=self.get_location_features())
with self.assertRaises(ValueError):
# Fails because (min_pts > |input elements|)
clusterer.create_labels(features)
def test_create_labels_nearInputs_singleCluster(self):
locations = [self.location(1,2), self.location(2,2)]
def test_create_labels_nearInputs_singleCluster(self):
locations = [self.location(1,2), self.location(2,2)]
features = self.clusterer._extract_features(dataset=locations, features=self.get_location_features())
labels = self.clusterer.create_labels(features)
features = self.clusterer._extract_features(dataset=locations, features=self.get_location_features())
labels = self.clusterer.create_labels(features)
self.assertEqual(2, len(labels))
self.assertEqual(labels[0], labels[1])
self.assertEqual(2, len(labels))
self.assertEqual(labels[0], labels[1])
def test_create_labels_nearInputs_twoClusters(self):
locations = [self.location(1,2), self.location(2,2), self.location(20,20), self.location(20,23)]
def test_create_labels_nearInputs_twoClusters(self):
locations = [self.location(1,2), self.location(2,2), self.location(20,20), self.location(20,23)]
features = self.clusterer._extract_features(dataset=locations, features=self.get_location_features())
labels = self.clusterer.create_labels(features)
features = self.clusterer._extract_features(dataset=locations, features=self.get_location_features())
labels = self.clusterer.create_labels(features)
self.assertEqual(4, len(labels))
self.assertEqual(labels[0], labels[1])
self.assertEqual(labels[2], labels[3])
self.assertNotEqual(labels[0], labels[2])
self.assertEqual(4, len(labels))
self.assertEqual(labels[0], labels[1])
self.assertEqual(labels[2], labels[3])
self.assertNotEqual(labels[0], labels[2])
#endregion create_labels
#endregion create_labels
#region label_dataset
#region label_dataset
def test_label_dataset_NoneLocations_NoException(self):
self.clusterer.label_dataset(None, [])
def test_label_dataset_NoneLocations_NoException(self):
self.clusterer.label_dataset(None, [])
def test_label_dataset_NoneLabels_NoException(self):
self.clusterer.label_dataset([], None)
def test_label_dataset_NoneLabels_NoException(self):
self.clusterer.label_dataset([], None)
def test_label_dataset_emptyInput_emptyOutput(self):
locations = []
self.clusterer.label_dataset(locations, [])
self.assertEqual(0, len(locations))
def test_label_dataset_emptyInput_emptyOutput(self):
locations = []
self.clusterer.label_dataset(locations, [])
self.assertEqual(0, len(locations))
def test_label_dataset_diffInputLengths_ValueError_1(self):
with self.assertRaises(ValueError):
self.clusterer.label_dataset([], [1])
def test_label_dataset_diffInputLengths_ValueError_1(self):
with self.assertRaises(ValueError):
self.clusterer.label_dataset([], [1])
def test_label_dataset_diffInputLengths_ValueError_2(self):
with self.assertRaises(ValueError):
self.clusterer.label_dataset([self.location(1,2)], [])
def test_label_dataset_diffInputLengths_ValueError_2(self):
with self.assertRaises(ValueError):
self.clusterer.label_dataset([self.location(1,2)], [])
def test_label_dataset_multInput_correctlyLabeled(self):
locations = [self.location(1,2), self.location(2,2), self.location(20,20)]
labels = [17,2,20]
def test_label_dataset_multInput_correctlyLabeled(self):
locations = [self.location(1,2), self.location(2,2), self.location(20,20)]
labels = [17,2,20]
self.clusterer.label_dataset(locations, labels)
self.clusterer.label_dataset(locations, labels)
self.assertEqual(3, len(locations))
self.assertHaveLabelsAsNewKey(locations, labels)
self.assertEqual(3, len(locations))
self.assertHaveLabelsAsNewKey(locations, labels)
#endregion label_dataset
#endregion label_dataset
#region cluster_dataset
#region cluster_dataset
def test_cluster_dataset_locationsMultInput_correctlyLabeled(self):
locations = [self.location(1,2), self.location(2,2), self.location(20,20), self.location(20,21)]
labels = [0,0,1,1]
exp_res = {0:locations[0:2], 1:locations[2:4]}
def test_cluster_dataset_locationsMultInput_correctlyLabeled(self):
locations = [self.location(1,2), self.location(2,2), self.location(20,20), self.location(20,21)]
labels = [0,0,1,1]
exp_res = {0:locations[0:2], 1:locations[2:4]}
res = self.clusterer.cluster_dataset(locations, self.get_location_features())
res = self.clusterer.cluster_dataset(locations, self.get_location_features())
self.assertHaveLabelsAsNewKey(locations, labels)
self.assertClusteringResult(exp_res, res)
self.assertHaveLabelsAsNewKey(locations, labels)
self.assertClusteringResult(exp_res, res)
def test_cluster_dataset_timesMultInput_correctlyLabeled(self):
times = [self.time(123), self.time(128), self.time(223), self.time(225)]
labels = [0,0,1,1]
exp_res = {0:times[0:2], 1:times[2:4]}
def test_cluster_dataset_timesMultInput_correctlyLabeled(self):
times = [self.time(123), self.time(128), self.time(223), self.time(225)]
labels = [0,0,1,1]
exp_res = {0:times[0:2], 1:times[2:4]}
res = self.clusterer.cluster_dataset(times, self.get_time_features())
self.assertHaveLabelsAsNewKey(times, labels)
self.assertClusteringResult(exp_res, res)
res = self.clusterer.cluster_dataset(times, self.get_time_features())
self.assertHaveLabelsAsNewKey(times, labels)
self.assertClusteringResult(exp_res, res)
def test_cluster_dataset_locationsMultInput_correctlyLabeled_2(self):
clusterer = Clusterer(3)
locations = [
self.location(1,2), self.location(2,2), self.location(2,2),
self.location(20,20), self.location(20,21), self.location(20,20),
self.location(50,50),
self.location(50,1), self.location(50,2), self.location(50,-1)
]
labels = [0,0,0,1,1,1,-1,2,2,2]
exp_res = {0:locations[0:3], 1:locations[3:6], -1:locations[6:7], 2:locations[7:10]}
def test_cluster_dataset_locationsMultInput_correctlyLabeled_2(self):
clusterer = Clusterer(3)
locations = [
self.location(1,2), self.location(2,2), self.location(2,2),
self.location(20,20), self.location(20,21), self.location(20,20),
self.location(50,50),
self.location(50,1), self.location(50,2), self.location(50,-1)
]
labels = [0,0,0,1,1,1,-1,2,2,2]
exp_res = {0:locations[0:3], 1:locations[3:6], -1:locations[6:7], 2:locations[7:10]}
res = clusterer.cluster_dataset(locations, self.get_location_features())
res = clusterer.cluster_dataset(locations, self.get_location_features())
self.assertHaveLabelsAsNewKey(locations, labels)
self.assertClusteringResult(exp_res, res)
self.assertHaveLabelsAsNewKey(locations, labels)
self.assertClusteringResult(exp_res, res)
#endregion cluster_dataset
#endregion cluster_dataset
#region helper methods
#region helper methods
def location(self, lat, long_) -> dict:
return {'latitude': lat, 'longitude':long_}
def location(self, lat, long_) -> dict:
return {'latitude': lat, 'longitude':long_}
def get_location_features(self):
return ['latitude', 'longitude']
def get_location_features(self):
return ['latitude', 'longitude']
def time(self, ts) -> dict:
return {'timestamp': ts}
def time(self, ts) -> dict:
return {'timestamp': ts}
def get_time_features(self):
return ['timestamp']
def get_time_features(self):
return ['timestamp']
def assertHaveLabelsAsNewKey(self, locations, labels):
self.assertEqual(len(labels), len(locations))
def assertHaveLabelsAsNewKey(self, locations, labels):
self.assertEqual(len(labels), len(locations))
for i in range(len(locations)):
self.assertEqual(labels[i], locations[i]['cluster_label'])
for i in range(len(locations)):
self.assertEqual(labels[i], locations[i]['cluster_label'])
def assertClusteringResult(self, expected: Dict[Any, List], actual: Dict[Any, ClusterResult]):
self.assertEqual(len(expected), len(actual))
def assertClusteringResult(self, expected: Dict[Any, List], actual: Dict[Any, ClusterResult]):
self.assertEqual(len(expected), len(actual))
for k in expected.keys():
if k not in actual:
self.fail(f"Cluster key ({k}, {type(k)}) not in result.")
self.assertListEqual(expected[k], actual[k].nodes)
#endregion helper methods
for k in expected.keys():
if k not in actual:
self.fail(f"Cluster key ({k}, {type(k)}) not in result.")
self.assertListEqual(expected[k], actual[k].nodes)
#endregion helper methods
if __name__ == '__main__':
unittest.main()
if __name__ == '__main__':
unittest.main()
except Exception as e:
print ("Exception found:")
print (e)
try:
import unittest
import unittest
import sys
for path in ['../', './', '../../../modules/', '../../../../modules']:
sys.path.insert(1, path)
import sys
for path in ['../', './', '../../../modules/', '../../../../modules']:
sys.path.insert(1, path)
from db.entities.connected_node import NodeC
from db.entities.connected_cluster import ClusterC
from db.entities.connected_layer import LayerC
from db.entities.connected_node import NodeC
from db.entities.connected_cluster import ClusterC
from db.entities.connected_layer import LayerC
import math
import datetime
from typing import Dict
from typing import Dict
from processing.similarityFiles.dataInput import *
from processing.similarityFiles.calculateWeights import *
from processing.similarityFiles.calculateSimilarity import *
from processing.similarityFiles.miscFunctions import *
import math
import datetime
from typing import Dict
from typing import Dict
from processing.similarityFiles.dataInput import *
from processing.similarityFiles.calculateWeights import *
from processing.similarityFiles.calculateSimilarity import *
from processing.similarityFiles.miscFunctions import *
import json
import json
class TestSimilarity(unittest.TestCase):
'''Tests the similarity calculation which works without object orientation.'''
class TestSimilarity(unittest.TestCase):
'''Tests the similarity calculation which works without object orientation.'''
def test_integration_similarityCalculation(self):
'''
Only for testing, can be deleted at any time.\n
Serves as a testing example to make sure the computations are correct.
'''
def test_integration_calculateSimilarity_ClustersDict_CorrectValue(self):
'''
Only for testing, can be deleted at any time.\n
Serves as a testing example to make sure the computations are correct.
'''
limitNrNodes = 100000
limitNrNodes = 100000
layerDict = dict()
("Creating Connected_cluster dict and similarity dict")
inputLayerLocation=getTestLocationLayerData()
inputLayerTime=getTestTimeLayerData()
inputLayerPrice=getTestPriceLayerData()
layerDict = populateWithNewNodesSingleLayer(inputLayerLocation,layerDict,limitNrNodes)
layerDict = populateWithNewNodesSingleLayer(inputLayerTime,layerDict,limitNrNodes)
layerDict = populateWithNewNodesSingleLayer(inputLayerPrice,layerDict,limitNrNodes)
layerDict = dict()
("Creating Connected_cluster dict and similarity dict")
inputLayerLocation=getTestLocationLayerData()
inputLayerTime=getTestTimeLayerData()
inputLayerPrice=getTestPriceLayerData()
layerDict = populateWithNewNodesSingleLayer(inputLayerLocation,layerDict,limitNrNodes)
layerDict = populateWithNewNodesSingleLayer(inputLayerTime,layerDict,limitNrNodes)
layerDict = populateWithNewNodesSingleLayer(inputLayerPrice,layerDict,limitNrNodes)
layerDict = calculateWeights(layerDict)
similarityDict = calculateSimilarity(layerDict)
("Asserting if the calculated values are true")
layerDict = calculateWeights(layerDict)
similarityDict = calculateSimilarity(layerDict)
("Asserting if the calculated values are true")
#assert x == "hello"
#assert x == "hello"
# SYNTAX:
#similarityDict[(clusterLabel1,clusterLabel2,layerOfTheClusters)][layerToWhichTheClustersAreCompared] == ExpectedSimilarityValue
# checking that cluster 1 and cluster 2 from the 'Location' layer have similarity sqrt(2) when compared against the 'Price' and 'Time' layers
self.assertEqual(similarityDict[(1,2,'Location')]['Price'], math.sqrt(2))
self.assertEqual(similarityDict[(1,2,'Location')]['Time'], math.sqrt(2))
self.assertEqual(similarityDict[(1,3,'Location')]['Price'], math.sqrt(10))
self.assertEqual(similarityDict[(1,3,'Location')]['Time'], math.sqrt(16))
self.assertEqual(similarityDict[(2,3,'Location')]['Price'], math.sqrt(4))
self.assertEqual(similarityDict[(2,3,'Location')]['Time'], math.sqrt(10))
self.assertEqual(similarityDict[(4,5,'Time')]['Location'], math.sqrt(19))
self.assertEqual(similarityDict[(4,5,'Time')]['Price'], math.sqrt(26))
self.assertEqual(similarityDict[(6,7,'Price')]['Location'], math.sqrt(3))
self.assertEqual(similarityDict[(6,7,'Price')]['Time'], math.sqrt(8))
# assert similarityDict[(1,2,'Location')]
("Test Passed Succesfully")
return layerDict
# SYNTAX:
#similarityDict[(clusterLabel1,clusterLabel2,layerOfTheClusters)][layerToWhichTheClustersAreCompared] == ExpectedSimilarityValue
# checking that cluster 1 and cluster 2 from the 'Location' layer have similarity sqrt(2) when compared against the 'Price' and 'Time' layers
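# Illustrative reading (values come from the fixed test fixtures below; the
# metric is presumably a Euclidean distance over the clusters' per-layer weights):
#   similarityDict[(1, 2, 'Location')]['Price']  ->  sqrt(2) ~= 1.414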
self.assertEqual(similarityDict[(1,2,'Location')]['Price'], math.sqrt(2))
self.assertEqual(similarityDict[(1,2,'Location')]['Time'], math.sqrt(2))
self.assertEqual(similarityDict[(1,3,'Location')]['Price'], math.sqrt(10))
self.assertEqual(similarityDict[(1,3,'Location')]['Time'], math.sqrt(16))
self.assertEqual(similarityDict[(2,3,'Location')]['Price'], math.sqrt(4))
self.assertEqual(similarityDict[(2,3,'Location')]['Time'], math.sqrt(10))
self.assertEqual(similarityDict[(4,5,'Time')]['Location'], math.sqrt(19))
self.assertEqual(similarityDict[(4,5,'Time')]['Price'], math.sqrt(26))
self.assertEqual(similarityDict[(6,7,'Price')]['Location'], math.sqrt(3))
self.assertEqual(similarityDict[(6,7,'Price')]['Time'], math.sqrt(8))
# assert similarityDict[(1,2,'Location')]
("Test Passed Succesfully")
return layerDict
def getTestLocationLayerData():
inputLayerLocation = [
{
"cluster_label": 1,
"layer_name": "Location",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "a",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "aa",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "aaa",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aaaa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
}
]
},
{
"cluster_label": 2,
"layer_name": "Location",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156b",
"TravelPrice": 15,
"UniqueID": "b",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "bb",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "bbb",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "bb",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
}
]
},
{
"cluster_label": 3,
"layer_name": "Location",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156c",
"TravelPrice": 15,
"UniqueID": "c",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "c",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "cc",
"UserID": "asdf"
}
]
}
]
return inputLayerLocation
def getTestTimeLayerData():
inputLayerTime = [
def getTestLocationLayerData():
inputLayerLocation = [
{
"cluster_label": 4,
"layer_name": "Time",
"cluster_label": 1,
"layer_name": "Location",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "a",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
......@@ -227,37 +95,19 @@ try:
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aaa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "aaaa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "b",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
"UniqueID": "aa",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "bb",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
"UniqueID": "aaa",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
......@@ -265,50 +115,20 @@ try:
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "bbb",
"UniqueID": "aaaa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
}
]
},
{
"cluster_label": 5,
"layer_name": "Time",
"cluster_label": 2,
"layer_name": "Location",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156f",
"TravelPrice": 15,
"UniqueID": "a",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "c",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
}
]
}
]
return inputLayerTime
def getTestPriceLayerData():
inputLayerPrice = [
{
"cluster_label": 6,
"layer_name": "Price",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156h",
"TravelID": "5e57ec9159bc0668543f156b",
"TravelPrice": 15,
"UniqueID": "b",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
......@@ -317,27 +137,19 @@ try:
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "bb",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "aaa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
"UniqueID": "bbb",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
......@@ -345,61 +157,245 @@ try:
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aaaa",
"UniqueID": "bb",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
}
]
},
{
"cluster_label": 7,
"layer_name": "Price",
"cluster_label": 3,
"layer_name": "Location",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156g",
"TravelPrice": 15,
"UniqueID": "a",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelID": "5e57ec9159bc0668543f156c",
"TravelPrice": 15,
"UniqueID": "aa",
"UniqueID": "c",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "b",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
"UniqueID": "c",
"UserID": "asdf"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelID": "asd",
"TravelPrice": 15,
"UniqueID": "c",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
"UniqueID": "cc",
"UserID": "asdf"
}
]
}
]
return inputLayerPrice
return inputLayerLocation
def getTestTimeLayerData():
inputLayerTime = [
{
"cluster_label": 4,
"layer_name": "Time",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "a",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aaa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aaaa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "b",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "bb",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "bbb",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
}
]
},
{
"cluster_label": 5,
"layer_name": "Time",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156f",
"TravelPrice": 15,
"UniqueID": "a",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "c",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
}
]
}
]
return inputLayerTime
def getTestPriceLayerData():
inputLayerPrice = [
{
"cluster_label": 6,
"layer_name": "Price",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156h",
"TravelPrice": 15,
"UniqueID": "b",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "bb",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aaa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aaaa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
}
]
},
{
"cluster_label": 7,
"layer_name": "Price",
"nodes": [
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156g",
"TravelPrice": 15,
"UniqueID": "a",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "aa",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "b",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
},
{
"Finished_time": 1576631193265951,
"Latitude_Destination": -5.973257,
"Longitude_Destination": 37.416316,
"TravelID": "5e57ec9159bc0668543f156d",
"TravelPrice": 15,
"UniqueID": "c",
"UserID": "a95075f5042b1b27060080156d87fe34ec7e712c"
}
]
}
]
return inputLayerPrice
if __name__ == '__main__':
unittest.main()
except Exception as e:
print ("Exception found:")
print (e)
\ No newline at end of file
if __name__ == '__main__':
unittest.main()
\ No newline at end of file
......@@ -20,6 +20,7 @@ from env_info import is_running_locally, get_resources_path
from messaging.ReconnectingMessageManager import ReconnectingMessageManager
from messaging.MessageHandler import MessageHandler
from flask import request
from flask_cors import CORS
from flask import redirect
# init message handler
......@@ -30,6 +31,7 @@ def message_received_callback(channel, method, properties, body):
# load swagger config
app = connexion.App(__name__, specification_dir='configs/')
CORS(app.app)
@app.app.before_request
def before_request():
......
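The merge enables CORS globally via `CORS(app.app)`. If that proves too permissive, flask-cors also accepts per-resource options; a hedged sketch (the origin URL is a placeholder, not a value from this repository):

```python
from flask_cors import CORS

# restrict cross-origin access to the API routes and a single origin
CORS(app.app, resources={r"/api/*": {"origins": "https://example.org"}})
```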
......@@ -10,6 +10,7 @@ connexion==2.7.0
coverage==5.3.1
cryptography==3.1
Flask==1.1.2
Flask-Cors==3.0.10
idna==2.9
importlib-metadata==1.6.1
inflection==0.5.0
......
......@@ -3,7 +3,9 @@ import sys
for path in ['../', './']:
sys.path.insert(1, path)
#####################################
### Don't include for test report ###
#####################################
try:
class TestCoverage(unittest.TestCase):
def test_init_main(self):
......
......@@ -67,8 +67,8 @@ class Test_Pipeline(unittest.TestCase):
}
}
}
def testTraceProcessing(self):
#original name testTraceProcessing
def test_handle_new_trace_newTraceMsg_correctlyInserted(self):
msg = self._buildTraceMessage()
self.handler.handle_new_trace(msg["content"])
self.assertEqual(len(self.handler._repository.layernodes),1)
......
......@@ -9,7 +9,7 @@ for modules_path in modules_paths:
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
from _add_use_case_scripts.vialog.tables import add_user, add_video
from _add_use_case_scripts.vialog.tables import add_user, add_video, add_change
import network_constants as nc
from security.token_manager import TokenManager
......@@ -38,4 +38,4 @@ if __name__ == "__main__":
add_use_case(use_case)
add_user.main(use_case)
add_video.main(use_case)
\ No newline at end of file
add_change.main(use_case)
\ No newline at end of file
import sys
import os
from pathlib import Path
from typing import Dict, Any
import requests
modules_paths = ['.', '../../../modules/']
for modules_path in modules_paths:
if os.path.exists(modules_path):
sys.path.insert(1, modules_path)
from _add_use_case_scripts.vialog.tables import add_user, add_video, add_change
import network_constants as nc
from security.token_manager import TokenManager
def add_use_case(use_case: str):
#use_case = "vialog"
jwt = TokenManager.getInstance().getToken()
url = f"https://articonf1.itec.aau.at:30420/api/use-cases"
response = requests.post(
url,
verify=False,
proxies = { "http":None, "https":None },
headers = { "Authorization": f"Bearer {jwt}"},
json = {"name": use_case}
)
print(url+": "+str(response.content))
if __name__ == "__main__":
use_case = "vialog-new-enum"
# disable ssl warnings :)
requests.packages.urllib3.disable_warnings()
add_use_case(use_case)
add_video.main(use_case)
add_change.main(use_case)
\ No newline at end of file
from _add_use_case_scripts.requestPost import postLayersToSwagger, postTableToSwagger
def add_table(use_case: str, table_name: str):
'''
take the columns and add the mappings at the server
replace all "/"'s in the internal representation with a "_"
'''
columns = [
"changeType",
"changedValue",
"previousValue",
"newValue",
]
columns = { c : c for c in columns }
columns["UniqueID"] = "videoId+changeId"
columns["changeType"] = "enum(changeType)"
columns["changedValue"] = "enum(changedValue)"
table = {
"name": table_name,
"mappings": columns
}
postTableToSwagger(use_case,table)
def add_layers(use_case:str, table_name: str):
layers = [
{
"use_case": use_case,
"table": table_name,
"name": "Changetype_Layer",
"properties": [
"changeType",
],
"cluster_properties": [
"changeType",
]
},
{
"use_case": use_case,
"table": table_name,
"name": "Changedvalue_Layer",
"properties": [
"changedValue",
"previousValue",
"newValue"
],
"cluster_properties": [
"changedValue"
]
}
]
postLayersToSwagger(use_case,layers)
def main(use_case: str):
print("user")
table_name = "change"
add_table(use_case,table_name)
add_layers(use_case,table_name)
\ No newline at end of file
......@@ -6,27 +6,32 @@ def add_table(use_case: str, table_name: str):
'''
columns = [
# "docType",
"videoId",
"Video_Token",
"replyTo",
"Created",
"Duration",
"videoResolution",
"Label",
"ThreadId",
"Position",
"ModifiedDate",
"Views",
"ModeratedBy",
"CommunityManagerNotes",
"Rewards",
"Video_State",
"Video_Type"
'videoId',
'objType',
'eventName',
'video_token',
'replyTo',
'created',
'duration',
'videoResolution',
'label',
'threadId',
'position',
'views',
'moderatedBy',
'moderationDate',
'communityManagerNotes',
'rewards',
'video_state',
'video_type'
]
columns = { c : c for c in columns }
columns["UniqueID"] = "videoId"
columns["moderatedBy"] = "enum(moderatedBy)"
columns["video_state"] = "enum(video_state)"
columns["video_type"] = "enum(video_type)"
columns["videoResolution"] = "enum(videoResolution)"
table = {
"name": table_name,
......@@ -43,14 +48,14 @@ def add_layers(use_case:str, table_name: str):
"name": "Manager_Layer",
"properties": [
"UniqueID",
"ModifiedDate",
"ModeratedBy",
"Video_State",
"Video_Type"
"moderationDate",
"moderatedBy",
"video_state",
"video_type"
],
"cluster_properties": [
"ModeratedBy",
"Video_State"
"moderatedBy",
"video_state"
]
},
{
......@@ -59,16 +64,16 @@ def add_layers(use_case:str, table_name: str):
"name": "Video_Popularity_Layer",
"properties": [
"UniqueID",
"Label",
"Created",
"Views",
"Rewards",
"Video_State",
"Video_Type"
"label",
"created",
"views",
"rewards",
"video_state",
"video_type"
],
"cluster_properties": [
"Views",
"Video_Type"
"views",
"video_type"
]
},
{
......@@ -77,15 +82,15 @@ def add_layers(use_case:str, table_name: str):
"name": "Video_Age_Layer",
"properties": [
"UniqueID",
"Label",
"Created",
"Views",
"Rewards",
"Video_State",
"Video_Type"
"label",
"created",
"views",
"rewards",
"video_state",
"video_type"
],
"cluster_properties": [
"Created"
"created"
]
},
{
......@@ -94,15 +99,15 @@ def add_layers(use_case:str, table_name: str):
"name": "Rewards_Layer",
"properties": [
"UniqueID",
"Label",
"Created",
"Views",
"Rewards",
"Video_State",
"Video_Type"
"label",
"created",
"views",
"rewards",
"video_state",
"video_type"
],
"cluster_properties": [
"Rewards"
"rewards"
]
},
{
......@@ -111,14 +116,14 @@ def add_layers(use_case:str, table_name: str):
"name": "Video_Lenght_Layer",
"properties": [
"UniqueID",
"Created",
"Views",
"Duration",
"Video_State",
"Video_Type"
"created",
"views",
"duration",
"video_state",
"video_type"
],
"cluster_properties": [
"Duration"
"duration"
]
},
{
......@@ -127,11 +132,11 @@ def add_layers(use_case:str, table_name: str):
"name": "Video_Resolution_Layer",
"properties": [
"UniqueID",
"Created",
"Views",
"created",
"views",
"videoResolution",
"Video_State",
"Video_Type"
"video_state",
"video_type"
],
"cluster_properties": [
"videoResolution"
......
......@@ -3,7 +3,9 @@ import sys
for path in ['../', './']:
sys.path.insert(1, path)
#####################################
### Don't include for test report ###
#####################################
try:
class TestCoverage(unittest.TestCase):
def test_init_main(self):
......
......@@ -4,7 +4,7 @@ from db.entities.layer_adapter import LayerAdapter
class Test_Layer_Adapter(unittest.TestCase):
def test_valid_adapter(self):
def test_LayerAdapter_newLayerAdapterObj_validInstantiation(self):
adapter1 = LayerAdapter("layer1", "use_case", "table", ["a", "c"], ["a"])
print(adapter1.to_serializable_dict())
......
import requests
import json
videoListUrl = "https://dev758755.vialog.app/Videos/Meta/ListAll"
videoUrl = "https://dev758755.vialog.app/stat/events?type=video&id="
# token from Rest Gateway to authorize
JWT_TOKEN = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wNC0wNyAxMjo0OTo0MS43MTkzNjQiLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDQtMDggMTI6NDk6NDEuNzE5MzY0In0.FN6qqBQeJSmXtS0-0dBiL-ojz6Ou7E5Tc9macrrhM4A'
useCase = "vialog-new-enum"
def send_transaction_to_rest_gateway(transaction: dict):
res = requests.post(
url = 'https://articonf1.itec.aau.at:30401/api/trace',
json = transaction,
headers = {"Authorization": f"Bearer {JWT_TOKEN}"},
verify = False # ignore ssl error
)
print(res)
videosRequest = requests.get(videoListUrl)
empty = set()
html = set()
if (videosRequest.status_code != 200):
print(f"Status: {videosRequest.status_code}")
dataCount = 0
for video in videosRequest.json():
dataCount += 1
id = video["videoId"]
videoRequest = requests.get(f"{videoUrl}{id}")
if videoRequest.status_code != 200:
print(f"Status: {videoRequest.status_code}")
if videoRequest.text != "" and not videoRequest.text.startswith("<!DOCTYPE html>"):
print(f"\n\n{videoUrl}{id}")
historyList = sorted(videoRequest.json()[0]["History"], key=lambda k: k['moderationDate'], reverse=True)
historyList.append(empty)
i = 0
changeList = []
for eventMap in historyList:
if historyList[i+1] == empty:
break
if i == 0:
lastState = eventMap
lastState["ApplicationType"] = useCase
lastState["docType"] = "video"
print(f"{lastState}\n\n\n")
send_transaction_to_rest_gateway(lastState)
if eventMap["eventName"] == 'r1eabcbdc8f5378b2ba71a1b6fe2038b Created' or eventMap["eventName"] == 'Created':
change = {"changeType": "Created", "changedValue": "video_state", "previousValue": "", "newValue":"Created"}
elif eventMap["eventName"] == "Restore":
change = {"changeType": "Restore", "changedValue": "", "previousValue": "", "newValue":""}
elif eventMap["eventName"] == "PositionChange":
change = {"changeType": "PositionChange", "changedValue": "position", "previousValue": historyList[i+1]["position"], "newValue": eventMap["position"]}
elif eventMap["eventName"] == "Hide":
change = {"changeType": "Hide", "changedValue": "video_state", "previousValue": historyList[i+1]["video_state"], "newValue": eventMap["video_state"]}
elif eventMap["eventName"] == "Publish":
change = {"changeType": "Publish", "changedValue": "video_state", "previousValue": historyList[i+1]["video_state"], "newValue":eventMap["video_state"]}
elif eventMap["eventName"] == "CMNote":
change = {"changeType": "CMNote", "changedValue": "communityManagerNotes", "previousValue": historyList[i+1]["communityManagerNotes"], "newValue":eventMap["communityManagerNotes"]}
elif eventMap["eventName"] == "Move":
change = {"changeType": "Move", "changedValue": "position", "previousValue": historyList[i+1]["position"], "newValue":eventMap["position"]}
elif eventMap["eventName"] == "VideoType":
change = {"changeType": "VideoType", "changedValue": "video_type", "previousValue": historyList[i+1]["video_type"], "newValue":eventMap["video_type"]}
elif eventMap["eventName"] == "Delete":
change = {"changeType": "Delete", "changedValue": "", "previousValue": "", "newValue": ""}
elif eventMap["eventName"] == "Copy":
change = {"changeType": "Copy", "changedValue": "", "previousValue": "", "newValue":""}
elif eventMap["eventName"] == "CustomLabel":
change = {"changeType": "CustomLabel", "changedValue": "label", "previousValue": historyList[i+1]["label"], "newValue":eventMap["label"]}
change["videoId"] = id
change["changeId"] = i
change["timestamp"] = eventMap["moderationDate"]
change["ApplicationType"] = useCase
change["docType"] = "change"
print(change)
send_transaction_to_rest_gateway(change)
i += 1
elif videoRequest.text == "":
empty.add(id)
else:
html.add(id)
print(f"empty: {empty}\n\n")
print(f"html page: {html}\n\n")
print(f"history: {historyList}")
print(dataCount)
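For reference, a "Move" event from the history would be forwarded by the loop above as a change transaction shaped like the following (all values are illustrative, not real data):

```python
# hypothetical payload produced for a 'Move' event
{
    "changeType": "Move",
    "changedValue": "position",
    "previousValue": 3,          # position from the next-older history entry
    "newValue": 1,               # position from the current entry
    "videoId": "abc123",
    "changeId": 4,
    "timestamp": 1576631193265951,
    "ApplicationType": "vialog-new-enum",
    "docType": "change",
}
```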
......@@ -2,6 +2,11 @@ import unittest
import sys
for path in ['../', './']:
sys.path.insert(1, path)
#####################################
### Don't include for test report ###
#####################################
try:
class TestCoverage(unittest.TestCase):
def test_init_main(self):
......
......@@ -31,7 +31,9 @@ class Repository(MongoRepositoryBase):
@throws
KeyError - Duplicate transaction ID
'''
reference = self.get_transaction_with_id(transaction.id())
use_case= transaction.use_case
table = transaction.table
reference = self.get_transaction_with_id(transaction.id(),use_case, table)
if reference == None:
super().insert_entry(self._transaction_collection, transaction.to_serializable_dict())
else:
......@@ -41,10 +43,10 @@ class Repository(MongoRepositoryBase):
result = super().get_entries(self._transaction_collection, projection={'_id': False}, selection={"use_case": use_case})
return [Transaction.from_serializable_dict(row) for row in list(result)]
def get_transaction_with_id(self, unique_id: str) -> Transaction:
result = list(super().get_entries(self._transaction_collection, projection={'_id': False}, selection={"UniqueID": unique_id}))
def get_transaction_with_id(self, unique_id: str, use_case:str, table:str ) -> Transaction:
result = list(super().get_entries(self._transaction_collection, projection={'_id': False}, selection={"id": unique_id,"use_case": use_case, "table":table}))
if len(result) == 1:
if len(result) >= 1:
return Transaction.from_serializable_dict(result[0])
return None
......
......@@ -8,9 +8,11 @@ import json
import hashlib
import logging
import requests
requests.packages.urllib3.disable_warnings()
from typing import Dict
LOGGER = logging.getLogger(__name__)
class MessageHandler:
......@@ -30,7 +32,7 @@ class MessageHandler:
self._rest_fetcher = rest_fetcher
def handle_generic(self, body):
LOGGER.info(f"Received message: {body}")
result = None
message = None
......@@ -39,11 +41,13 @@ class MessageHandler:
except (ValueError, TypeError):
result = self.MSG_NOT_JSON
LOGGER.warning(result)
LOGGER.info(f"Received message: {body}")
return result
if not 'type' in message:
result = self.MSG_NO_TYPE
LOGGER.warning(result)
LOGGER.info(f"Received message: {body}")
return result
if message['type'] == 'blockchain-transaction':
......@@ -51,8 +55,11 @@ class MessageHandler:
result = self.MSG_TRACE_PROCESSED
else:
result = self.MSG_NOT_PROCESSED
LOGGER.warning(result)
LOGGER.info(f"Received message: {body}")
LOGGER.info(result)
#LOGGER.info(result) #too much spam
return result
def _resolve_path(self, data: Dict, path:str) -> Dict:
......@@ -132,39 +139,39 @@ class MessageHandler:
'''
# check if there is a use-case in the message
if "ApplicationType" not in transaction_message.keys():
if "ApplicationType" not in transaction_message:
LOGGER.error("Transaction has no ApplicationType, storing it under use-case 'unknown'.")
transaction_message["ApplicationType"] = "unknown"
self._mongo_repo.add_failed_transaction(transaction_message)
return
# check if there is a doctype in the message
if "docType" not in transaction_message.keys():
LOGGER.error("Transaction has no docType, storing it under docType 'unknown'.")
# check if there is a table in the message
if "docType" not in transaction_message:
LOGGER.error("Transaction has no docType, storing it under table 'unknown'.")
transaction_message["docType"] = "unknown"
self._mongo_repo.add_failed_transaction(transaction_message)
return
use_case = transaction_message["ApplicationType"]
docType = transaction_message["docType"]
transaction_use_case = transaction_message["ApplicationType"]
transaction_table = transaction_message["docType"]
try:
tables = self._rest_fetcher.fetch_schema_information(use_case)
tables = self._rest_fetcher.fetch_schema_information(transaction_use_case)
except ValueError as e:
LOGGER.error(f"{e}\nStoring it as a failed transaction.")
LOGGER.error(f"{e}\nCould not fetch schema, storing it as a failed transaction..")
self._mongo_repo.add_failed_transaction(transaction_message)
return
target_table = None
# find correct table
for table in tables:
if table["name"] == docType:
if table["name"] == transaction_table:
target_table = table
break
# abort if table does not exist.
if target_table == None:
LOGGER.error(f"There is no table '{docType}', storing it as a failed transaction.")
LOGGER.error(f"There is no table '{transaction_table}', storing it as a failed transaction.")
self._mongo_repo.add_failed_transaction(transaction_message)
return
......@@ -172,20 +179,19 @@ class MessageHandler:
try:
flattened = self._flatten_transaction(transaction_message, mappings)
except KeyError as e:
LOGGER.error(f"Failed while flattening with KeyError: {str(e)}")
LOGGER.error(f"Failed while flattening with KeyError: {str(e)}, storing it as a failed transaction.")
self._mongo_repo.add_failed_transaction(transaction_message)
return
transaction = Transaction(use_case, target_table["name"], flattened)
transaction = Transaction(transaction_use_case, target_table["name"], flattened)
#check for duplicates
try:
reference = self._mongo_repo.get_transaction_with_id(transaction.id())
if reference != None:
if (reference[0].table == transaction.table) and (reference[0].use_case == transaction.use_case):
LOGGER.error("Found duplicate")
self._mongo_repo.add_duplicated_transaction(transaction)
return
references = self._mongo_repo.get_transaction_with_id(transaction.id(), transaction_use_case, transaction_table)
if references != None:
LOGGER.info("Found duplicate, storing it as a duplicated transaction.")
self._mongo_repo.add_duplicated_transaction(transaction)
return
except ValueError as e:
LOGGER.error(f"{e}, could not insert duplicated node.")
return
......@@ -193,14 +199,14 @@ class MessageHandler:
try:
self._mongo_repo.add_transaction(transaction)
except KeyError as e:
LOGGER.error(f"{e}")
self._mongo_repo.add_failed_transaction(transaction_message)
LOGGER.error(f"{e}, ignored {transaction_message}")
# self._mongo_repo.add_failed_transaction(transaction_message)
return
msg = {
"type": "new-trace",
"content": transaction.to_serializable_dict(),
"content": transaction.to_serializable_dict()
}
msg_string = json.dumps(msg)
......
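A sketch of a message that passes every check in the handler above; the envelope shape is inferred from the tests below, and the payload fields are invented for illustration:
import json

message = {
    "type": "blockchain-transaction",  # missing 'type' yields MSG_NO_TYPE
    "content": {
        "ApplicationType": "crowd-journalism",  # use case; replaced by 'unknown' if absent
        "docType": "video",                     # must match a table name from the schema fetcher
        "videoid": "abc123",                    # example payload field (assumed)
    },
}
body = json.dumps(message)  # handle_generic first parses body as JSON, else MSG_NOT_JSON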
......@@ -23,7 +23,8 @@ def delete_all_failed_for_use_case(use_case: str):
return Response(status=200)
def all_duplicated_for_use_case(use_case: str):
return _repository.all_duplicated_transactions_for_use_case(use_case)
transactions = _repository.all_duplicated_transactions_for_use_case(use_case)
return [t.to_serializable_dict() for t in transactions]
def delete_all_duplicated_for_use_case(use_case: str):
_repository.delete_all_duplicated_transactions(use_case)
......
......@@ -3,6 +3,10 @@ import sys
for path in ['../', './']:
sys.path.insert(1, path)
#####################################
### Don't include for test report ###
#####################################
try:
class TestCoverage(unittest.TestCase):
def test_init_main(self):
......
......@@ -18,14 +18,14 @@ class DummyMongoRepo:
def add_transaction(self, transaction):
self.added_transactions.append(transaction)
def get_transaction_with_id(self, unique_id: str):
def get_transaction_with_id(self, unique_id: str, use_case, table):
result = []
for trans in self.added_transactions:
transID = trans.id()
if transID == unique_id:
if transID == unique_id and trans.use_case == use_case and trans.table == table:
result.append(trans)
if len(result) > 0:
return result
return result[0]
return None
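After this change the stub mirrors the production repository's contract: one matching transaction or None, never a list. A hedged usage sketch (the Transaction constructor arguments follow the handler code above; the properties dict is assumed):
repo = DummyMongoRepo()
trans = Transaction("crowd-journalism", "video", {"videoid": "abc123"})  # assumed properties
repo.add_transaction(trans)
assert repo.get_transaction_with_id(trans.id(), "crowd-journalism", "video") is not None
assert repo.get_transaction_with_id(trans.id(), "crowd-journalism", "purchase") is None  # table differs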
......@@ -170,7 +170,7 @@ class Test_MessageHandler(unittest.TestCase):
self.assertEqual('semantic-linking', self.msg_sender.last_message['key'])
self.assertEqual('new-trace', json.loads(self.msg_sender.last_message['msg'])["type"])
def test_handleblockchain_duplicateTrace(self):
def test_handleBlockchainTransaction_duplicateTrace_oneTransAddedToDuplicateRepo(self):
msg = self._get_valid_message()
msg2 = self._get_valid_message()
msg = eval(msg)
......@@ -178,8 +178,9 @@ class Test_MessageHandler(unittest.TestCase):
self.handler.handle_blockchain_transaction(msg['content'])
self.handler.handle_blockchain_transaction(msg2['content'])
self.assertEqual(len(self.repo.added_transactions),len(self.repo.duplicated_transactions))
self.assertEqual(len(self.repo.added_transactions),1)
def test_handleblockchain_duplicateTraceDifferentTable(self):
def test_handleBlockchainTransaction_duplicateTraceDifferentTable_bothTransactionsAddedAsUnique(self):
msg = self._get_valid_message()
msg2 = self._get_valid_message2()
msg = eval(msg)
......@@ -188,7 +189,7 @@ class Test_MessageHandler(unittest.TestCase):
self.handler.handle_blockchain_transaction(msg2['content'])
self.assertEqual(len(self.repo.added_transactions),2)
def test_handleblockchain_duplicateTraceDifferentUseCase(self):
def test_handleBlockchainTransaction_duplicateTraceDifferentUseCase_bothTransactionsAddedAsUnique(self):
msg = self._get_valid_message()
msg2 = self._get_valid_message3()
msg = eval(msg)
......@@ -197,5 +198,24 @@ class Test_MessageHandler(unittest.TestCase):
self.handler.handle_blockchain_transaction(msg2['content'])
self.assertEqual(len(self.repo.added_transactions),2)
def test_handleBlockchainTransaction_multipleTransactions3SameIdDiffUseCaseTable_3AddedUnique2Duplicate(self):
#print("Entered Test: 3Unique 2Dupli")
msg = self._get_valid_message()
msg2 = self._get_valid_message2()
msg3 = self._get_valid_message3()
msg4 = self._get_valid_message3()
msg5 = self._get_valid_message3()
msg = eval(msg)
msg2 = eval(msg2)
msg3 = eval(msg3)
msg4 = eval(msg4)
msg5 = eval(msg5)
self.handler.handle_blockchain_transaction(msg['content'])
self.handler.handle_blockchain_transaction(msg2['content'])
self.handler.handle_blockchain_transaction(msg3['content'])
self.handler.handle_blockchain_transaction(msg4['content'])
self.handler.handle_blockchain_transaction(msg5['content'])
self.assertEqual(len(self.repo.added_transactions),3)
self.assertEqual(len(self.repo.duplicated_transactions),2)
if __name__ == '__main__':
unittest.main()
\ No newline at end of file
import requests
requests.packages.urllib3.disable_warnings()
from icecream import ic
def httpget(url):
token = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wMy0yNCAxMDoxMzo1MS4wMjkwNDkiLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDMtMjUgMTA6MTM6NTEuMDI5MDQ5In0.V6kYV5Lmb_tUIsF-6AKNB8_lIifmJP_Dm8gHhGa5w_o'
res = requests.get(url,
verify=False,
headers = { "Authorization": f"Bearer {token}"})
return res
# list tables
res = httpget(url = 'https://articonf1.itec.aau.at:30420/api/use-cases/crowd-journalism/tables')
print("Tables: ", [entry['name'] for entry in res.json()])
# count pushed data
def count_data(json_res, table_identifier='table'):
tables = {}
for entry in json_res:
key = entry[table_identifier]
if key not in tables:
tables[key] = 0
tables[key] += 1
ic(tables)
res = httpget(url = 'https://articonf1.itec.aau.at:30001/api/use_cases/crowd-journalism/transactions')
count_data(res.json())
res_f = httpget(url = 'https://articonf1.itec.aau.at:30001/api/use_cases/crowd-journalism/transactions-failed')
count_data(res_f.json(), 'docType')
# failed tags: the expected "tag" field is missing because it is stored under "name" instead
# failed purchases: duplicate keys generated from (userid, videoid, ownerid)
# failed classifications: impact is missing
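A rough sketch that tallies the three failure causes noted above across the failed transactions; the field names are taken from the comments and are assumptions about the raw payloads:
from collections import Counter

def categorize_failure(entry: dict) -> str:
    doc_type = entry.get('docType')
    if doc_type == 'tag' and 'tag' not in entry:
        return 'tag stored under "name"'
    if doc_type == 'purchase':
        return 'duplicate (userid, videoid, ownerid) key'
    if doc_type == 'classification' and entry.get('impact') is None:
        return 'missing impact'
    return 'other'

print(Counter(categorize_failure(e) for e in res_f.json()))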
# visualize content
import matplotlib.pyplot as plt
def visualize_video_coordinates():
geolocations = []
for entry in res.json():
if entry['table'] != 'video':
continue
loc_ = entry['properties']['geolocation'].split(',')
if loc_[0] == 'undefined' or loc_[1] == 'undefined':
continue
geolocations.append(loc_)
plt.scatter([float(coor[0]) for coor in geolocations], [float(coor[1]) for coor in geolocations])
plt.axis('off')
plt.show()
# visualize_video_coordinates()
def visualize_video_prices():
price = []
for entry in res.json():
if entry['table'] != 'video':
continue
price.append(entry['properties']['price'])
from collections import Counter
print(Counter(price))
plt.hist(price, bins=len(set(price)))
plt.show()
# visualize_video_prices()
def visualize_content_ratings():
impact = []
informative = []
trustiness = []
for entry in res.json():
if entry['table'] != 'classification':
continue
if entry['properties']['impact'] is not None:
impact.append(entry['properties']['impact'])
if entry['properties']['informative'] is not None:
informative.append(entry['properties']['informative'])
if entry['properties']['trustiness'] is not None:
trustiness.append(entry['properties']['trustiness'])
from collections import Counter
print(Counter(impact))
print(Counter(informative))
print(Counter(trustiness))
fig, (ax1, ax2, ax3) = plt.subplots(3)
ax1.hist(impact, bins=len(set(impact)))
ax1.set_title('impact')
ax2.hist(informative, bins=len(set(informative)))
ax2.set_title('informative')
ax3.hist(trustiness, bins=len(set(trustiness)))
ax3.set_title('trustiness')
plt.show()
# visualize_content_ratings()
# counting duplicate entries for 'purchase'
working_purchase_ids = [(entry['properties']['userid'], entry['properties']['videoid'], entry['properties']['ownerid']) for entry in res.json() if entry['table'] == 'purchase']
failed_purchase_ids = [(entry['userid'], entry['videoid'], entry['ownerid']) for entry in res_f.json() if entry['docType'] == 'purchase']
ic(len(working_purchase_ids))
ic(len(failed_purchase_ids))
cnt = 0
for failed_id in failed_purchase_ids:
if failed_id in working_purchase_ids:
cnt += 1
ic(cnt)
\ No newline at end of file
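The overlap count above scans a list for every failed id; converting the working ids to a set makes each membership test O(1) without changing the result:
working_set = set(working_purchase_ids)
cnt = sum(1 for failed_id in failed_purchase_ids if failed_id in working_set)
ic(cnt)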
import requests
requests.packages.urllib3.disable_warnings()
from icecream import ic
token = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wMy0yNCAxMDoxMzo1MS4wMjkwNDkiLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDMtMjUgMTA6MTM6NTEuMDI5MDQ5In0.V6kYV5Lmb_tUIsF-6AKNB8_lIifmJP_Dm8gHhGa5w_o'
def httpget(url):
res = requests.get(url,
verify=False,
headers = { "Authorization": f"Bearer {token}"})
return res
res_f = httpget(url = 'https://articonf1.itec.aau.at:30001/api/use_cases/crowd-journalism/transactions-failed')
failed_purchases = []
for entry in res_f.json():
if entry['docType'] == 'purchase':
failed_purchases.append(entry)
print(len(failed_purchases))
# upload again
def httppost_gateway(content_):
url = 'https://articonf1.itec.aau.at:30401/api/trace'
res = requests.post(url,
verify=False,
headers = { "Authorization": f"Bearer {token}"},
json=content_)
return res
for purchase in failed_purchases:
res = httppost_gateway(purchase)
print(res)
\ No newline at end of file
......@@ -18,7 +18,7 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def send_transaction_to_rest_gateway(transaction: dict):
# token from Rest Gateway to authorize
JWT_TOKEN = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wMi0wOCAxMzo0NzoxOC40NzUxMjEiLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDItMDkgMTM6NDc6MTguNDc1MTIxIn0.DWY9c0X2XQJDz0Ef35-k1IVY6GWf00ogaVOCeX8Irlo'
JWT_TOKEN = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InJlZ3VsYXJAaXRlYy5hYXUuYXQiLCJjcmVhdGVkX2F0IjoiMjAyMS0wMy0xNiAxMzoxNDoyMS42MDc1NjciLCJ2YWxpZF91bnRpbCI6IjIwMjEtMDMtMTcgMTM6MTQ6MjEuNjA3NTY3In0.ZGObriEDWYo1BgiYN3pQSosS7UuNrq10GSCSjmRHSAw'
res = requests.post(
url = 'https://articonf1.itec.aau.at:30401/api/trace',
......@@ -47,7 +47,6 @@ if __name__ == '__main__':
transaction['ApplicationType'] = 'reddit'
transaction['docType'] = 'reddit'
for key, value in obj_dict.items():
transaction[key] = value
......@@ -56,6 +55,7 @@ if __name__ == '__main__':
if summ % 1000 == 0:
print("Uploaded " + str(summ) + " transactions.")
if summ >= 1:
break
print("TOTAL Uploaded " + str(summ) + " transactions.")
\ No newline at end of file
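Note that the 'if summ >= 1: break' guard above stops the loop after a single upload, which looks like a leftover debugging cap; removing those two lines would upload the full Reddit dataset.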