Commit b028f0df authored by Alexander Lercher

Minor fixes

parent 259e6339
@@ -15,8 +15,7 @@ repo = Repository()
 def get_clusters(layer_file) -> List[Cluster]:
     with open(layer_file, 'r') as file:
         clusters = json.loads(file.read())
-    return [Cluster(cluster_dict=cluster, from_db=False) for cluster in clusters]
+    return [Cluster(cluster_dict=cluster, from_db=True) for cluster in clusters]
 
 def store_generic_clusters(clusters: List[Cluster], layer):
@@ -38,7 +37,7 @@ def store_generic_clusters(clusters: List[Cluster], layer):
         print(f"failed uploading {layer}")
 
-layers = ['Position_Layer.json', 'Solar_Production_Layer.json']
+layers = ['User_Demand_Layer.json']
 
 for layer in layers:
     clusts: List[Cluster] = get_clusters(layer)
     # print(len(clusts))
...
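Note on the flag change above: with this commit the upload script reads each layer's clusters back from a '<layer>.json' file and re-creates Cluster objects with from_db=True. Below is a minimal, self-contained sketch of that JSON round-trip; the Cluster stub is hypothetical and only mirrors the argument names visible in the diff (cluster_dict, from_db, to_serializable_dict, for_db), not the project's real class.

    # sketch_upload_roundtrip.py -- illustrative only, not part of the commit
    import json
    from typing import List

    class Cluster:
        # Hypothetical stand-in: only the argument names from the diff are kept.
        def __init__(self, cluster_dict: dict = None, from_db: bool = False):
            self.data = dict(cluster_dict or {})
            self.from_db = from_db

        def to_serializable_dict(self, for_db: bool = False) -> dict:
            return dict(self.data)

    def get_clusters(layer_file: str) -> List[Cluster]:
        with open(layer_file, 'r') as file:
            clusters = json.loads(file.read())
        return [Cluster(cluster_dict=cluster, from_db=True) for cluster in clusters]

    if __name__ == '__main__':
        # write a tiny file the way store_generic_clusters does, then read it back
        sample = [Cluster({'cluster_label': 0, 'nodes': []})]
        with open('User_Demand_Layer.json', 'w') as file:
            file.write(json.dumps([c.to_serializable_dict(for_db=False) for c in sample]))
        print(len(get_clusters('User_Demand_Layer.json')))  # -> 1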
@@ -89,8 +89,8 @@ def _fetch_nodes(use_case: str, table: str, layer_name: str) -> List[Dict]:
 def fetch_nodes_from_semantic_linking():
     '''Empties the db and inserts layers and nodes from BusinessLogic and SemanticLinking'''
     repository = Repository()
-    repository.delete_all_layers()
-    repository.delete_all_nodes()
+    # repository.delete_all_layers()
+    # repository.delete_all_nodes()
 
     use_cases = _fetch_use_cases()
     for use_case in use_cases:
@@ -99,6 +99,9 @@ def fetch_nodes_from_semantic_linking():
         tables = _fetch_tables(use_case)
 
         for table in tables:
+            if table != 'bank-app':
+                continue
+
             layers = _fetch_layers(use_case, table)
             for layer in layers:
...
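The guard added above restricts fetch_nodes_from_semantic_linking to a single table, and with the delete calls commented out the run no longer empties the database first. A short sketch of the same skip pattern; everything here is made up except the table name 'bank-app', which comes from the diff.

    # Illustrative stand-in for _fetch_tables(use_case); only 'bank-app' is real.
    tables = ['bank-app', 'other-table-a', 'other-table-b']

    for table in tables:
        # skip everything except the one table that should be (re)imported
        if table != 'bank-app':
            continue
        print(f"importing layers and nodes for table {table}")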
@@ -17,12 +17,10 @@ repo = Repository()
 def run_generic_clustering():
     '''Runs the clustering for all layers found in the repository.'''
     all_layers:List[Layer] = repo.get_layers()
-    # all_layers = [l for l in all_layers if l.layer_name in ['!Heating_Consumption_Layer', 'Position_Layer', '!Solar_Production_Layer']]
+    all_layers = [l for l in all_layers
+                  if l.layer_name in ['User_Demand_Layer'] and l.use_case == 'smart-energy']
 
     for layer in all_layers:
-        # if layer.use_case != 'smart-energy':
-        #     continue
         print(f"Clustering {layer.use_case}//{layer.use_case_table}//{layer.layer_name}.")
 
         if layer.properties is None or len(layer.properties) == 0:
@@ -53,7 +51,7 @@ def run_clustering_for_layer(layer: Layer) -> List[Cluster]:
 def store_generic_clusters(clusters: List[Cluster], layer):
     try:
         with open(f'{layer}.json', 'w') as file:
-            cluster_dicts = [c.to_serializable_dict(for_db=True) for c in clusters]
+            cluster_dicts = [c.to_serializable_dict(for_db=False) for c in clusters]
             file.write(json.dumps(cluster_dicts))
     except:
         pass
...
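The list comprehension introduced above narrows clustering to one layer of one use case. Below is a small, self-contained sketch of that filter with a hypothetical Layer stand-in; the real objects come from repo.get_layers() and carry more fields than shown, and the sample data is invented.

    # Illustrative only: a dataclass stand-in for the repository's Layer model.
    from dataclasses import dataclass
    from typing import List

    @dataclass
    class Layer:
        use_case: str
        use_case_table: str
        layer_name: str

    all_layers: List[Layer] = [
        Layer('smart-energy', 'smart-energy', 'User_Demand_Layer'),
        Layer('smart-energy', 'smart-energy', 'Position_Layer'),
        Layer('bank-app', 'bank-app', 'User_Demand_Layer'),
    ]

    # same predicate as in the diff: one layer name, one use case
    all_layers = [l for l in all_layers
                  if l.layer_name in ['User_Demand_Layer'] and l.use_case == 'smart-energy']

    for layer in all_layers:
        print(f"Clustering {layer.use_case}//{layer.use_case_table}//{layer.layer_name}.")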